code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
user = request.session['%suser' % SESSION_KEY] profile = request.session['%sprofile' % SESSION_KEY] client = request.session['%sclient' % SESSION_KEY] return user, profile, client
def get_session_data(self, request)
Return a tuple ``(user, profile, client)`` from the session.
3.304874
2.212455
1.493759
signals.login.send(sender=profile.__class__, user=user, profile=profile, client=client, request=request)
def send_login_signal(self, request, user, profile, client)
Send a signal that a user logged in. This signal should be sent only if the user was *not* logged into Django.
3.331217
3.915392
0.850801
signals.connect.send(sender=profile.__class__, user=user, profile=profile, client=client, request=request)
def send_connect_signal(self, request, user, profile, client)
Send a signal that a user connected a social profile to his Django account. This signal should be sent *only* when a new social connection was created.
3.234684
3.132362
1.032666
return self.request(self.access_token_url, method="GET", params=params)
def request_access_token(self, params)
Foursquare does not accept POST requests to retrieve an access token, so we'll be doing a GET request instead.
4.254121
3.208281
1.325982
bits = get_bits(token) if len(bits) > 1: return FormNode(bits[0], bits[1:]) if len(bits) == 1: return FormNode(bits[0]) return FormNode(None)
def openid_form(parser, token)
Render OpenID form. Allows to pre set the provider:: {% openid_form "https://www.google.com/accounts/o8/id" %} Also creates custom button URLs by concatenating all arguments after the provider's URL {% openid_form "https://www.google.com/accounts/o8/id" STATIC_URL "image/for/google.jpg" %}
3.114756
3.651383
0.853034
if INITAL_DATA_FUNCTION: func = self.import_attribute(INITAL_DATA_FUNCTION) return func(request, user, profile, client) return {}
def get_initial_data(self, request, user, profile, client)
Return initial data for the setup form. The function can be controlled with ``SOCIALREGISTRATION_INITIAL_DATA_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
5.106352
5.495622
0.929167
if CONTEXT_FUNCTION: func = self.import_attribute(CONTEXT_FUNCTION) return func(request, user, profile, client) return {}
def get_context(self, request, user, profile, client)
Return additional context for the setup view. The function can be controlled with ``SOCIALREGISTRATION_SETUP_CONTEXT_FUNCTION``. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
6.123032
6.389431
0.958306
func = self.get_username_function() user.username = func(user, profile, client) user.set_unusable_password() user.save() profile.user = user profile.save() user = profile.authenticate() self.send_connect_signal(request, user, profile, client) self.login(request, user) self.send_login_signal(request, user, profile, client) self.delete_session_data(request) return HttpResponseRedirect(self.get_next(request))
def generate_username_and_redirect(self, request, user, profile, client)
Generate a username and then redirect the user to the correct place. This method is called when ``SOCIALREGISTRATION_GENERATE_USERNAME`` is set. :param request: The current request object :param user: The unsaved user object :param profile: The unsaved profile object :param client: The API client
2.928929
3.316171
0.883226
if request.user.is_authenticated(): return HttpResponseRedirect(self.get_next(request)) try: user, profile, client = self.get_session_data(request) except KeyError: return self.error_to_response(request, dict( error=_("Social profile is missing from your session."))) if GENERATE_USERNAME: return self.generate_username_and_redirect(request, user, profile, client) form = self.get_form()(initial=self.get_initial_data(request, user, profile, client)) additional_context = self.get_context(request, user, profile, client) return self.render_to_response(dict({'form': form}, **additional_context))
def get(self, request)
When signing a new user up - either display a setup form, or generate the username automatically.
4.068197
3.911828
1.039973
if request.user.is_authenticated(): return self.error_to_response(request, dict( error=_("You are already logged in."))) try: user, profile, client = self.get_session_data(request) except KeyError: return self.error_to_response(request, dict( error=_("A social profile is missing from your session."))) form = self.get_form()(request.POST, request.FILES, initial=self.get_initial_data(request, user, profile, client)) if not form.is_valid(): additional_context = self.get_context(request, user, profile, client) return self.render_to_response(dict({'form': form}, **additional_context)) user, profile = form.save(request, user, profile, client) user = profile.authenticate() self.send_connect_signal(request, user, profile, client) self.login(request, user) self.send_login_signal(request, user, profile, client) self.delete_session_data(request) return HttpResponseRedirect(self.get_next(request))
def post(self, request)
Save the user and profile, login and send the right signals.
3.000827
2.869673
1.045704
request.session['next'] = self.get_next(request) client = self.get_client()() request.session[self.get_client().get_session_key()] = client url = client.get_redirect_url(request=request) logger.debug("Redirecting to %s", url) try: return HttpResponseRedirect(url) except OAuthError, error: return self.error_to_response(request, {'error': error}) except socket.timeout: return self.error_to_response(request, {'error': _('Could not connect to service (timed out)')})
def post(self, request)
Create a client, store it in the user's session and redirect the user to the API provider to authorize our app and permissions.
4.132537
3.929304
1.051722
try: client = request.session[self.get_client().get_session_key()] logger.debug("API returned: %s", request.GET) client.complete(dict(request.GET.items())) request.session[self.get_client().get_session_key()] = client return HttpResponseRedirect(self.get_redirect()) except KeyError: return self.error_to_response(request, {'error': "Session expired."}) except OAuthError, error: return self.error_to_response(request, {'error': error}) except socket.timeout: return self.error_to_response(request, {'error': _('Could not connect to service (timed out)')})
def get(self, request)
Called after the user is redirected back to our application. Tries to: - Complete the OAuth / OAuth2 flow - Redirect the user to another view that deals with login, connecting or user creation.
4.064845
3.817567
1.064774
try: client = request.session[self.get_client().get_session_key()] except KeyError: return self.error_to_response(request, {'error': "Session expired."}) # Get the lookup dictionary to find the user's profile lookup_kwargs = self.get_lookup_kwargs(request, client) # Logged in user (re-)connecting an account if request.user.is_authenticated(): try: profile = self.get_profile(**lookup_kwargs) # Make sure that there is only *one* account per profile. if not profile.user == request.user: self.delete_session_data(request) return self.error_to_response(request, { 'error': _('This profile is already connected to another user account.') }) except self.get_model().DoesNotExist: profile, created = self.get_or_create_profile(request.user, save=True, **lookup_kwargs) self.send_connect_signal(request, request.user, profile, client) return self.redirect(request) # Logged out user - let's see if we've got the identity saved already. # If so - just log the user in. If not, create profile and redirect # to the setup view user = self.authenticate(**lookup_kwargs) # No user existing - create a new one and redirect to the final setup view if user is None: if not ALLOW_OPENID_SIGNUPS and self.client is OpenIDClient: return self.error_to_response(request, { 'error': _('We are not currently accepting new OpenID signups.') }) user = self.create_user() profile = self.create_profile(user, **lookup_kwargs) self.store_user(request, user) self.store_profile(request, profile) self.store_client(request, client) return HttpResponseRedirect(reverse('socialregistration:setup')) # Inactive user - displaying / redirect to the appropriate place. if not user.is_active: return self.inactive_response(request) # Active user with existing profile: login, send signal and redirect self.login(request, user) profile = self.get_profile(user=user, **lookup_kwargs) self.send_login_signal(request, user, profile, client) return self.redirect(request)
def get(self, request)
Called after authorization was granted and the OAuth flow successfully completed. Tries to: - Connect the remote account if the user is logged in already - Log the user in if a local profile of the remote account exists already - Create a user and profile object if none of the above succeed and redirect the user further to either capture some data via form or generate a username automatically
4.09955
4.052144
1.011699
# We're just starting out and don't have neither request nor access # token. Return the standard client if not self._request_token and not self._access_token: client = oauth.Client(self.consumer, timeout=TIMEOUT) # We're one step in, we've got the request token and can add that to # the client. if self._request_token and not self._access_token: if verifier is not None: self._request_token.set_verifier(verifier) client = oauth.Client(self.consumer, self._request_token, timeout=TIMEOUT) # Two steps in, we've got an access token and can now properly sign # our client requests with it. if self._access_token: client = oauth.Client(self.consumer, self._access_token, timeout=TIMEOUT) return client
def client(self, verifier=None)
Return the correct client depending on which stage of the OAuth process we're in.
3.97323
3.689492
1.076904
params = { 'oauth_callback': self.get_callback_url() } response, content = self.client().request(self.request_token_url, "POST", body=urllib.urlencode(params)) content = smart_unicode(content) if not response['status'] == '200': raise OAuthError(_( u"Invalid status code %s while obtaining request token from %s: %s") % ( response['status'], self.request_token_url, content)) token = dict(urlparse.parse_qsl(content)) return oauth.Token(token['oauth_token'], token['oauth_token_secret'])
def _get_request_token(self)
Fetch a request token from `self.request_token_url`.
2.797994
2.586376
1.08182
response, content = self.client(verifier).request( self.access_token_url, "POST") content = smart_unicode(content) if not response['status'] == '200': raise OAuthError(_( u"Invalid status code %s while obtaining access token from %s: %s") % (response['status'], self.access_token_url, content)) token = dict(urlparse.parse_qsl(content)) return (oauth.Token(token['oauth_token'], token['oauth_token_secret']), token)
def _get_access_token(self, verifier=None)
Fetch an access token from `self.access_token_url`.
3.647007
3.540004
1.030227
if self._request_token is None: self._request_token = self._get_request_token() return self._request_token
def get_request_token(self)
Return the request token for this API. If we've not fetched it yet, go out, request and memoize it.
3.001608
2.229771
1.346151
if self._access_token is None: self._access_token, self._access_token_dict = self._get_access_token(verifier) return self._access_token
def get_access_token(self, verifier=None)
Return the access token for this API. If we've not fetched it yet, go out, request and memoize it.
2.809295
2.484763
1.130609
params = { 'oauth_token': self.get_request_token().key, } return '%s?%s' % (self.auth_url, urllib.urlencode(params))
def get_redirect_url(self, **kwargs)
Return the authorization/authentication URL signed with the request token.
3.174165
2.347535
1.352127
token = self.get_access_token(verifier=GET.get('oauth_verifier', None)) return token
def complete(self, GET)
When redirected back to our application, try to complete the flow by requesting an access token. If the access token request fails, it'll throw an `OAuthError`. Tries to complete the flow by validating against the `GET` parameters received.
7.165703
7.171484
0.999194
params = params or {} headers = headers or {} logger.debug("URL: %s", url) logger.debug("Method: %s", method) logger.debug("Headers: %s", headers) logger.debug("Params: %s", params) response, content = self.client().request(url, method, headers=headers, body=urllib.urlencode(params)) content = smart_unicode(content) logger.debug("Status: %s", response['status']) logger.debug("Content: %s", content) if response['status'] != '200': raise OAuthError(_( u"Invalid status code %s while requesting %s: %s") % ( response['status'], url, content)) return content
def request(self, url, method="GET", params=None, headers=None)
Make signed requests against `url`.
2.261886
2.250148
1.005216
params = { 'response_type': 'code', 'client_id': self.client_id, 'redirect_uri': self.get_callback_url(**kwargs), 'scope': self.scope or '', 'state': state, } return '%s?%s' % (self.auth_url, urllib.urlencode(params))
def get_redirect_url(self, state='', **kwargs)
Assemble the URL to where we'll be redirecting the user to request permissions.
1.871362
1.835907
1.019312
return self.request(self.access_token_url, method="POST", params=params, is_signed=False)
def request_access_token(self, params)
Request the access token from `self.access_token_url`. The default behaviour is to use a `POST` request, but some services use `GET` requests. Individual clients can override this method to use the correct HTTP method.
5.451387
4.243337
1.284694
params.update({ 'code': code, 'client_id': self.client_id, 'client_secret': self.secret, 'redirect_uri': self.get_callback_url(), }) logger.debug("Params: %s", params) resp, content = self.request_access_token(params=params) content = smart_unicode(content) logger.debug("Status: %s", resp['status']) logger.debug("Content: %s", content) content = self.parse_access_token(content) if 'error' in content: raise OAuthError(_( u"Received error while obtaining access token from %s: %s") % ( self.access_token_url, content['error'])) return content
def _get_access_token(self, code, **params)
Fetch an access token with the provided `code`.
2.609099
2.629977
0.992062
if self._access_token is None: if code is None: raise ValueError(_('Invalid code.')) self.access_token_dict = self._get_access_token(code, **params) try: self._access_token = self.access_token_dict['access_token'] except KeyError, e: raise OAuthError("Credentials could not be validated, the provider returned no access token.") return self._access_token
def get_access_token(self, code=None, **params)
Return the memoized access token or go out and fetch one.
3.566091
3.382552
1.054261
if 'error' in GET: raise OAuthError( _("Received error while obtaining access token from %s: %s") % ( self.access_token_url, GET['error'])) return self.get_access_token(code=GET.get('code'))
def complete(self, GET)
Complete the OAuth2 flow by fetching an access token with the provided code in the GET parameters.
4.995717
4.059135
1.230734
params = params or {} headers = headers or {} if is_signed: params.update(self.get_signing_params()) if method.upper() == "GET": url = '%s?%s' % (url, urllib.urlencode(params)) return self.client().request(url, method=method, headers=headers) return self.client().request(url, method, body=urllib.urlencode(params), headers=headers)
def request(self, url, method="GET", params=None, headers=None, is_signed=True)
Make a request against ``url``. By default, the request is signed with an access token, but can be turned off by passing ``is_signed=False``.
2.341231
2.447082
0.956744
return self.client().request(self.access_token_url, method="POST", body=urllib.urlencode(params), headers={'Content-Type':'application/x-www-form-urlencoded'})
def request_access_token(self, params)
Google requires correct content-type for POST requests
3.328936
2.826841
1.177617
v = self._read32() # Ignore bottom 4 bits of thermocouple data. v >>= 4 # Grab bottom 11 bits as internal temperature data. internal = v & 0x7FF if v & 0x800: # Negative value, take 2's compliment. Compute this with subtraction # because python is a little odd about handling signed/unsigned. internal -= 4096 # Scale by 0.0625 degrees C per bit and return value. return internal * 0.0625
def readInternalC(self)
Return internal temperature value in degrees celsius.
6.873336
6.024524
1.140893
v = self._read32() # Check for error reading value. if v & 0x7: return float('NaN') # Check if signed bit is set. if v & 0x80000000: # Negative value, take 2's compliment. Compute this with subtraction # because python is a little odd about handling signed/unsigned. v >>= 18 v -= 16384 else: # Positive value, just shift the bits to get the value. v >>= 18 # Scale by 0.25 degrees C per bit and return value. return v * 0.25
def readTempC(self)
Return the thermocouple temperature value in degrees celsius.
5.075838
4.90926
1.033931
if self.is_string(val) and self._in_quotes(val, self.quote): # make sure any previously escaped quotes are not re-escaped middle = self.remove_quotes(val).replace("\\" + self.quote, self.quote) middle = middle.replace(self.quote, "\\" + self.quote) val = self.add_quotes(middle) return val
def escape_quotes(self, val)
Escape any quotes in a value
4.270305
4.096295
1.04248
if self._in_quotes(val, self.altquote): middle = self.remove_quotes(val) val = self.add_quotes(middle) return self.escape_quotes(val)
def standardise_quotes(self, val)
Change the quotes used to wrap a value to the pprint default E.g. "val" to 'val' or 'val' to "val"
6.856477
6.952329
0.986213
# add any composite level comments comments = d.get("__comments__", {}) lines = [] self._add_type_comment(level, comments, lines) lines += [self.add_start_line(key, level)] lines += self.process_dict(d, level, comments) lines.append(self.add_end_line(level, 1, key)) return lines
def process_key_dict(self, key, d, level)
Process key value dicts e.g. METADATA "key" "value"
5.946648
5.885984
1.010306
lines = [] for k, v in d.items(): if not self.__is_metadata(k): qk = self.quoter.add_quotes(k) qv = self.quoter.add_quotes(v) line = self.__format_line(self.whitespace(level, 2), qk, qv) line += self.process_attribute_comment(comments, k) lines.append(line) return lines
def process_dict(self, d, level, comments)
Process keys and values within a block
4.334571
4.225512
1.02581
lines = [] for k, v in d.items(): k = "CONFIG {}".format(self.quoter.add_quotes(k.upper())) v = self.quoter.add_quotes(v) lines.append(self.__format_line(self.whitespace(level, 1), k, v)) return lines
def process_config_dict(self, key, d, level)
Process the CONFIG block
4.441517
4.051939
1.096146
lines = [] for v in lst: k = key.upper() v = self.quoter.add_quotes(v) lines.append(self.__format_line(self.whitespace(level, 1), k, v)) return lines
def process_repeated_list(self, key, lst, level)
Process blocks of repeated keys e.g. FORMATOPTION
6.174133
5.845191
1.056276
lines = [self.add_start_line(key, level)] list_spacer = self.spacer * (level + 2) pairs = ["{}{} {}".format(list_spacer, p[0], p[1]) for p in pair_list] lines += pairs lines.append(self.add_end_line(level, 1, key)) return lines
def format_pair_list(self, key, pair_list, level)
Process lists of pairs (e.g. PATTERN block)
4.227918
4.038117
1.047002
lines = [] def depth(L): return isinstance(L, (tuple, list)) and max(map(depth, L)) + 1 if depth(root_list) == 2: # single set of points only root_list = [root_list] for pair_list in root_list: lines += self.format_pair_list(key, pair_list, level) return lines
def format_repeated_pair_list(self, key, root_list, level)
Process (possibly) repeated lists of pairs e.g. POINTs blocks
3.675015
3.405843
1.079033
if key in ("layers", "classes", "styles", "symbols", "labels", "outputformats", "features", "scaletokens", "composites") and isinstance(val, list): return True else: return False
def is_hidden_container(self, key, val)
The key is not one of the Mapfile keywords, and its values are a list
11.753826
9.057718
1.297659
# if only a single composite is used then cast to list # and allow for multiple root composites if composites and not isinstance(composites, list): composites = [composites] lines = [] for composite in composites: type_ = composite["__type__"] if type_ in ("metadata", "validation"): # types are being parsed directly, and not as an attr of a parent lines += self.process_key_dict(type_, composite, level=0) else: lines += self._format(composite) result = str(self.newlinechar.join(lines)) return result
def pprint(self, composites)
Print out a nicely indented Mapfile
7.288059
7.065629
1.031481
if isinstance(value, bool): return str(value).upper() if any(i in ["enum"] for i in attr_props): if not isinstance(value, numbers.Number): return value.upper() # value is from a set list, no need for quote else: return value if "type" in attr_props and attr_props["type"] == "string": # and "enum" not in attr_props # check schemas for expressions and handle accordingly if self.is_expression(attr_props) and self.quoter.in_slashes(value): return value elif self.is_expression(attr_props) and (value.endswith("'i") or value.endswith('"i')): # for case insensitive regex return value else: return self.quoter.add_quotes(value) # expressions can be one of a string or an expression in brackets if any(i in ["oneOf", "anyOf"] for i in attr_props): # and check that type string is in list if "oneOf" in attr_props: options_list = attr_props["oneOf"] else: options_list = attr_props["anyOf"] if self.quoter.is_string(value): if self.quoter.in_parenthesis(value): pass elif attr == "expression" and self.quoter.in_braces(value): # don't add quotes to list expressions such as {val1, val2} pass elif attr != "text" and self.quoter.in_brackets(value): # TEXT expressions are often "[field1]-[field2]" so need to leave quotes for these pass elif value.startswith("NOT ") and self.quoter.in_parenthesis(value[4:]): value = "NOT {}".format(value[4:]) else: value = self.check_options_list(options_list, value) if isinstance(value, list): new_values = [] for v in value: if not isinstance(v, numbers.Number): v = self.quoter.add_quotes(v) new_values.append(v) value = " ".join(list(map(str, new_values))) else: value = self.quoter.escape_quotes(value) return value
def format_value(self, attr, attr_props, value)
TODO - refactor and add more specific tests (particularly for expressions)
3.811053
3.696326
1.031038
attr_props = self.get_attribute_properties(type_, attr) value = self.format_value(attr, attr_props, value) line = self.__format_line(self.whitespace(level, 1), attr.upper(), value) return line
def process_attribute(self, type_, attr, value, level)
Process one of the main composite types (see the type_ value)
5.489114
5.355228
1.025001
if key not in comments: comment = "" else: value = comments[key] spacer = self.whitespace(level, 0) if isinstance(value, list): comments = [self.format_comment(spacer, v) for v in value] comment = self.newlinechar.join(comments) else: comment = self.format_comment(spacer, value) return comment
def process_composite_comment(self, level, comments, key)
Process comments for composites such as MAP, LAYER etc.
3.558413
3.451878
1.030863
open_images = [] for fn in img_files: print(fn) im = Image.open(fn) open_images.append(im) im = open_images[0] im.save(r"C:\temp\animation.gif", save_all=True, append_images=open_images[1:], duration=120, loop=100, optimize=True)
def create_animation(img_files)
See http://pillow.readthedocs.io/en/4.2.x/handbook/image-file-formats.html?highlight=append_images#saving
2.67037
2.640858
1.011175
composites = [] for composite_dict in children: if False and self.include_position: key_token = composite_dict[1] key_name = key_token.value.lower() composites_position = self.get_position_dict(composite_dict) composites_position[key_name] = self.create_position_dict(key_token, None) composites.append(composite_dict) # only return a list when there are multiple root composites (e.g. # several CLASSes) if len(composites) == 1: return composites[0] else: return composites
def start(self, children)
Parses a MapServer Mapfile. Parsing of partial Mapfiles or lists of composites is also possible.
4.730289
4.802561
0.984951
if len(t) == 1: return t[0] # metadata and values - already processed key_token = t[0][0] attribute_dicts = t[1] if not isinstance(attribute_dicts, list): # always handle a list of attributes attribute_dicts = [attribute_dicts] key_name = self.key_name(key_token) composite_dict = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict) composite_dict["__type__"] = key_name if self.include_position: pd = self.create_position_dict(key_token, None) composite_dict["__position__"] = pd if self.include_comments: comments_dict = composite_dict["__comments__"] = OrderedDict() for d in attribute_dicts: keys = d.keys() if "__type__" in keys: k = d["__type__"] if k in SINGLETON_COMPOSITE_NAMES: composite_dict[k] = d else: plural_key = self.plural(k) if plural_key not in composite_dict: composite_dict[plural_key] = [] composite_dict[plural_key].append(d) else: # simple attribute pos = d.pop("__position__") d.pop("__tokens__", None) # tokens are no longer needed now we have the positions comments = d.pop("__comments__", None) key_name = self.get_single_key(d) if key_name == "config": # there may be several config dicts - one for each setting if key_name not in composite_dict: # create an initial OrderedDict composite_dict[key_name] = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict) # populate the existing config dict cfg_dict = composite_dict[key_name] cfg_dict.update(d[key_name]) if self.include_position: if key_name not in pd: pd[key_name] = OrderedDict() subkey_name = self.get_single_key(d[key_name]) pd[key_name][subkey_name] = pos elif key_name == "points": if key_name not in composite_dict: composite_dict[key_name] = d[key_name] else: # if points are already in a feature then # allow for multipart features in a nested list existing_points = composite_dict[key_name] def depth(L): return isinstance(L, (tuple, list)) and max(map(depth, L)) + 1 if depth(existing_points) == 2: composite_dict[key_name] = [existing_points] if key_name not in composite_dict: 
composite_dict[key_name] = [] composite_dict[key_name].append(d[key_name]) if self.include_position: if key_name not in pd: pd[key_name] = pos else: existing_pos = pd[key_name] if isinstance(existing_pos, dict): pd[key_name] = [existing_pos] pd[key_name].append(pos) elif key_name in REPEATED_KEYS: if key_name not in composite_dict: composite_dict[key_name] = [] composite_dict[key_name].append(d[key_name]) if self.include_position: if key_name not in pd: pd[key_name] = [] pd[key_name].append(pos) else: assert len(d.items()) == 1 if self.include_position: # hoist position details to composite pd[key_name] = pos if self.include_comments and comments: # hoist comments to composite comments_dict[key_name] = comments composite_dict[key_name] = d[key_name] return composite_dict
def composite(self, t)
Handle the composite types e.g. CLASS..END t is a list in the form [[Token(__LAYER36, 'LAYER')], [OrderedDict([...])]]
2.700006
2.620482
1.030347
assert len(tokens) >= 2 key = tokens[0] assert key.value.lower() == name assert tokens[-1].value.lower() == "end" if len(tokens) == 2: body = [] # empty TYPE..END block else: body = tokens[1:-1] body_tokens = [] for t in body: if isinstance(t, dict): body_tokens.append(t["__tokens__"]) else: body_tokens.append(t) return key, body_tokens
def check_composite_tokens(self, name, tokens)
Return the key and contents of a KEY..END block for PATTERN, POINTS, and PROJECTION
3.783516
3.133603
1.207401
key, body = self.check_composite_tokens(type_, tokens) key_name = self.key_name(key) d = CaseInsensitiveOrderedDict(CaseInsensitiveOrderedDict) for t in body: k = self.clean_string(t[0].value).lower() v = self.clean_string(t[1].value) if k in d.keys(): log.warning("A duplicate key ({}) was found in {}. Only the last value ({}) will be used. ".format( k, type_, v)) d[k] = v if self.include_position: pd = self.create_position_dict(key, body) d["__position__"] = pd d["__type__"] = key_name # return the token as well as the processed dict so the # composites function works the same way return d
def process_value_pairs(self, tokens, type_)
Metadata, Values, and Validation blocks can either have string pairs or attributes. Attributes will already be processed.
5.40141
5.618519
0.961358
func, params = t func_name = func.value func.value = "({}({}))".format(func_name, params) return func
def func_call(self, t)
For function calls e.g. TEXT (tostring([area],"%.2f"))
6.217397
5.785327
1.074684
if len(metadata) > 2: string_pairs = metadata[1:-1] # get all metadata pairs for sp in string_pairs: # get the raw metadata key if isinstance(sp.children[0], Token): token = sp.children[0] assert token.type == "UNQUOTED_STRING" key = token.value else: # quoted string (double or single) token = sp.children[0].children[0] key = token.value # clean it to match the dict key key = self._mapfile_todict.clean_string(key).lower() assert key in d.keys() key_comments = self.get_comments(sp.meta) d["__comments__"][key] = key_comments return d
def add_metadata_comments(self, d, metadata)
Any duplicate keys will be replaced with the last duplicate along with comments
5.723941
5.675495
1.008536
comments = list(comments) comments.sort(key=lambda c: c.line) idx_by_line = {0: 0} # {line_no: comment_idx} for i, c in enumerate(comments): if c.line not in idx_by_line: idx_by_line[c.line] = i idx = [] # convert comment tokens to strings, and remove any line breaks self.comments = [c.value.strip() for c in comments] last_comment_line = max(idx_by_line.keys()) # make a list with an entry for each line # number associated with a comment list index for i in range(last_comment_line, 0, -1): if i in idx_by_line: # associate line with new comment idx.append(idx_by_line[i]) else: # associate line with following comment idx.append(idx[-1]) idx.append(0) # line numbers start from 1 idx.reverse() self.idx = idx self._assign_comments(tree, 0)
def assign_comments(self, tree, comments)
Capture any comments in the tree header_comments stores comments preceding a node
3.562313
3.536699
1.007242
if PY2 and not isinstance(text, unicode): # specify Unicode for Python 2.7 text = unicode(text, 'utf-8') if self.expand_includes: text = self.load_includes(text, fn=fn) try: self._comments[:] = [] # clear any comments from a previous parse tree = self.lalr.parse(text) if self.include_comments: self.assign_comments(tree, self._comments) return tree except (ParseError, UnexpectedInput) as ex: if fn: log.error("Parsing of {} unsuccessful".format(fn)) else: log.error("Parsing of Mapfile unsuccessful") log.info(ex) raise
def parse(self, text, fn=None)
Parse the Mapfile
4.538562
4.163016
1.09021
node = pydot.Node(child_id, style="filled", fillcolor=colour, label=child_label, shape="polygon", fontname=FONT) graph.add_node(node) graph.add_edge(pydot.Edge(parent_id, node))
def add_child(graph, child_id, child_label, parent_id, colour)
http://www.graphviz.org/doc/info/shapes.html#polygon
2.59394
2.268894
1.143262
# replace any Windows path back slashes with forward slashes schemas_folder = schemas_folder.replace("\\", "/") # HACK Python 2.7 on Linux seems to remove the root slash # so add this back in if schemas_folder.startswith("/"): schemas_folder = "/" + schemas_folder host = "" root_schema_path = "file://{}/{}".format(host, schemas_folder) + "/" return root_schema_path
def get_schema_path(self, schemas_folder)
Return a file protocol URI e.g. file:///D:/mappyfile/mappyfile/schemas/ on Windows and file:////home/user/mappyfile/mappyfile/schemas/ on Linux
5.356221
4.89868
1.093401
def get_schema_validator(self, schema_name):
    """
    Return a jsonschema Draft4 validator for the named schema.

    The loaded schema and its RefResolver are cached on the instance.

    Had to remove the id property from map.json or it uses URLs for validation
    See various issues at https://github.com/Julian/jsonschema/pull/306
    """
    if schema_name not in self.schemas:
        schema_file = self.get_schema_file(schema_name)
        with open(schema_file) as f:
            try:
                jsn_schema = json.load(f)
            except ValueError as ex:
                log.error("Could not load %s", schema_file)
                raise ex
        schemas_folder = self.get_schemas_folder()
        root_schema_path = self.get_schema_path(schemas_folder)
        # resolver is needed so $ref entries resolve against the schemas folder
        resolver = jsonschema.RefResolver(root_schema_path, None)
        # cache the schema for future use
        self.schemas[schema_name] = (jsn_schema, resolver)
    else:
        jsn_schema, resolver = self.schemas[schema_name]
    validator = jsonschema.Draft4Validator(schema=jsn_schema, resolver=resolver)
    # validator.check_schema(jsn_schema)  # check schema is valid
    return validator
2.392705
2.321067
1.030864
def create_message(self, rootdict, path, error, add_comments):
    """
    Build an error-message dict for a validation failure, optionally
    adding a comment to the dict structure.

    path is the path to the error object; it can be empty if the error is
    in the root object, or reference an object in a list e.g. [u'layers', 0].
    See http://python-jsonschema.readthedocs.io/en/latest/errors/#jsonschema.exceptions.ValidationError.absolute_path
    It is not currently possible to get the name of the failing property
    from the JSONSchema error object, even though it is in the error
    message - see https://github.com/Julian/jsonschema/issues/119
    """
    if not path:
        # error applies to the root type
        d = rootdict
        key = d["__type__"]
    elif isinstance(path[-1], int):
        # the error is on an object in a list
        d = utils.findkey(rootdict, *path)
        key = d["__type__"]
    else:
        # the error is on a named property of a parent object
        key = path[-1]
        d = utils.findkey(rootdict, *path[:-1])
    error_message = "ERROR: Invalid value in {}".format(key.upper())
    # add a comment to the dict structure
    if add_comments:
        if "__comments__" not in d:
            d["__comments__"] = OrderedDict()
        d["__comments__"][key] = "# {}".format(error_message)
    error_message = {"error": error.message, "message": error_message}
    # add in details of the error line, when Mapfile was parsed to
    # include position details
    if "__position__" in d:
        if not path:
            # position for the root object is stored in the root of the dict
            pd = d["__position__"]
        else:
            pd = d["__position__"][key]
        error_message["line"] = pd.get("line")
        error_message["column"] = pd.get("column")
    return error_message
4.231578
4.071833
1.039232
def validate(self, value, add_comments=False, schema_name="map"):
    """
    Validate a Mapfile dict (or a list of them) against the named schema
    and return the collected validation error messages.
    """
    validator = self.get_schema_validator(schema_name)
    if isinstance(value, list):
        # validate each dict individually and collect all messages
        error_messages = []
        for item in value:
            error_messages += self._validate(item, validator, add_comments, schema_name)
        return error_messages
    return self._validate(value, validator, add_comments, schema_name)
2.39798
2.251622
1.065001
def get_expanded_schema(self, schema_name):
    """
    Return a schema file with all $ref properties expanded.

    Expanded schemas are cached on the instance.
    """
    if schema_name not in self.expanded_schemas:
        fn = self.get_schema_file(schema_name)
        schemas_folder = self.get_schemas_folder()
        # base_uri is required so relative $ref entries resolve correctly
        base_uri = self.get_schema_path(schemas_folder)
        with open(fn) as f:
            jsn_schema = jsonref.load(f, base_uri=base_uri)
        # cache the schema for future use
        self.expanded_schemas[schema_name] = jsn_schema
    else:
        jsn_schema = self.expanded_schemas[schema_name]
    return jsn_schema
2.556555
2.380004
1.074181
def output(s):
    """Parse, transform, and pretty print the result."""
    parser = Parser()
    transformer = ExpressionsTransformer()
    ast = parser.parse(s)
    logging.debug(ast.pretty())
    print(ast.pretty())
    result = transformer.transform(ast)
    print(json.dumps(result, indent=4))
    return result
4.588357
4.335342
1.058361
def comparison(self, t):
    """
    Build a comparison dict from a 3-token comparison expression, e.g.::

        <PropertyIsEqualTo>
            <PropertyName>NAME</PropertyName>
            <Literal>Sydney</Literal>
        </PropertyIsEqualTo>

    :param t: the three tokens (property, operator, value)
    :returns: a dict keyed by the comparison type
    """
    # removed a block of commented-out string-joining code that was dead
    assert len(t) == 3
    return {"PropertyIsEqualTo": [t[0], t[1], t[2]]}
5.962983
5.553046
1.073822
def main(ctx, verbose, quiet):
    """Execute the main mappyfile command."""
    # net verbosity: each --verbose adds, each --quiet subtracts
    verbosity = verbose - quiet
    configure_logging(verbosity)
    ctx.obj = {"verbosity": verbosity}
4.363794
5.103943
0.854985
def format(ctx, input_mapfile, output_mapfile, indent, spacer, quote, newlinechar, expand, comments):
    """
    Format the input-mapfile and save as output-mapfile. Note
    output-mapfile will be overwritten if it already exists.

    Example: mappyfile format C:/Temp/valid.map C:/Temp/valid_formatted.map
    """
    # decode escape sequences so e.g. \t and \n typed on the command
    # line become real tab/newline characters
    quote = codecs.decode(quote, 'unicode_escape')
    spacer = codecs.decode(spacer, 'unicode_escape')
    newlinechar = codecs.decode(newlinechar, 'unicode_escape')
    mapfile_dict = mappyfile.open(input_mapfile, expand_includes=expand,
                                  include_comments=comments, include_position=True)
    mappyfile.save(mapfile_dict, output_mapfile, indent=indent, spacer=spacer,
                   quote=quote, newlinechar=newlinechar)
    sys.exit(0)
2.831583
2.769393
1.022456
def validate(ctx, mapfiles, expand):
    """
    Validate Mapfile(s) against the Mapfile schema.

    The MAPFILES argument is a list of paths, either to individual
    Mapfiles, or folders containing Mapfiles. Validation errors are
    reported to the console; the program exits with the error count
    (0 when no validation errors are encountered).
    """
    all_mapfiles = get_mapfiles(mapfiles)
    if len(all_mapfiles) == 0:
        click.echo("No Mapfiles found at the following paths: {}".format(",".join(mapfiles)))
        return
    validation_count = 0
    errors = 0
    for fn in all_mapfiles:
        fn = click.format_filename(fn)
        d = mappyfile.open(fn, expand_includes=expand, include_position=True)
        validation_messages = mappyfile.validate(d)
        if validation_messages:
            for v in validation_messages:
                v["fn"] = fn
                msg = "{fn} (Line: {line} Column: {column}) {message} - {error}".format(**v)
                click.echo(msg)
            # NOTE(review): counts files with validation errors, not
            # individual messages - confirm intended granularity
            errors += 1
        else:
            click.echo("{} validated successfully".format(fn))
            validation_count += 1
    click.echo("{} file(s) validated ({} successfully)".format(len(all_mapfiles), validation_count))
    sys.exit(errors)
3.062497
3.121586
0.981071
def get_keyword(text):
    """
    Accept a string such as BACKGROUNDCOLOR [r] [g] [b] and return
    backgroundcolor, or None when the first word is not a keyword.
    """
    word, _, _ = text.partition(" ")
    # keywords are upper-case and longer than a single character
    if len(word) > 1 and word.isupper():
        return str(word.lower())
    return None
3.424278
3.754489
0.912049
def get_values(text):
    """
    Accept a string such as BACKGROUNDCOLOR [r] [g] [b] and return
    ['r', 'g', 'b'].
    """
    parts = []
    for match in re.findall(r"\[(.*?)\]", text):
        # a bracket may contain several |-separated alternatives
        parts.extend(match.split("|"))
    return [str(p.lower()) for p in parts]
2.798831
2.843008
0.984461
def process_doc(text):
    """
    Walk the docutils document tree to remove :ref: directives - the
    :ref: role is supported by Sphinx but not by plain docutils.
    """
    # http://epydoc.sourceforge.net/docutils/private/docutils.nodes.document-class.html
    doctree = docutils.core.publish_doctree(text)
    ref_visitor = RefVisitor(doctree)
    doctree.walk(ref_visitor)
    return ref_visitor.kwd, ref_visitor.values
9.152032
7.489944
1.221909
def open(fn, expand_includes=True, include_comments=False, include_position=False, **kwargs):
    """
    Load a Mapfile from the supplied filename into a Python dictionary.

    :param fn: path to the Mapfile, or partial Mapfile
    :param expand_includes: load any INCLUDE files in the Mapfile
    :param include_comments: include comment strings (experimental)
    :param include_position: include token positions in the output
    :returns: a dict representing the Mapfile in the mappyfile format
    """
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    return transformer.transform(parser.parse_file(fn))
3.346074
4.770063
0.701474
def load(fp, expand_includes=True, include_position=False, include_comments=False, **kwargs):
    """
    Load a Mapfile from an open file or file-like object (utf-8 encoded).

    :param fp: a file-like object
    :param expand_includes: load any INCLUDE files in the Mapfile
    :param include_comments: include comment strings (experimental)
    :param include_position: include token positions in the output
    :returns: a dict representing the Mapfile in the mappyfile format
    """
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    return transformer.transform(parser.load(fp))
3.440614
4.703512
0.731499
def loads(s, expand_includes=True, include_position=False, include_comments=False, **kwargs):
    """
    Load a Mapfile (or partial Mapfile) from a string.

    :param s: the Mapfile text
    :param expand_includes: load any INCLUDE files in the Mapfile
    :param include_comments: include comment strings (experimental)
    :param include_position: include token positions in the output
    :returns: a dict representing the Mapfile in the mappyfile format
    """
    parser = Parser(expand_includes=expand_includes,
                    include_comments=include_comments, **kwargs)
    transformer = MapfileToDict(include_position=include_position,
                                include_comments=include_comments, **kwargs)
    return transformer.transform(parser.parse(s))
3.040735
3.792939
0.801683
def dump(d, fp, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False):
    """
    Write d (the Mapfile dictionary) as a formatted stream to fp.

    :param d: a dict based on the mappyfile schema
    :param fp: a file-like object
    :param indent: number of spacer characters per indent level
    :param spacer: the indentation character (spaces or tabs)
    :param quote: the quote character (double or single quotes)
    :param newlinechar: the newline character
    :param end_comment: add a block-type comment at each closing END
    """
    fp.write(_pprint(d, indent, spacer, quote, newlinechar, end_comment))
3.310647
6.415533
0.516036
def save(d, output_file, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False, **kwargs):
    """
    Write a dictionary to an output Mapfile on disk.

    :param d: a dict based on the mappyfile schema
    :param output_file: the output filename
    :param indent: number of spacer characters per indent level
    :param spacer: the indentation character (spaces or tabs)
    :param quote: the quote character (double or single quotes)
    :param newlinechar: the newline character
    :param end_comment: add a block-type comment at each closing END
    :returns: the output_file passed into the function
    """
    rendered = _pprint(d, indent, spacer, quote, newlinechar, end_comment)
    _save(output_file, rendered)
    return output_file
3.574636
6.302959
0.567136
def dumps(d, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False, **kwargs):
    """
    Output a Mapfile dictionary as a string.

    :param d: a dict based on the mappyfile schema
    :param indent: number of spacer characters per indent level
    :param spacer: the indentation character (spaces or tabs)
    :param quote: the quote character (double or single quotes)
    :param newlinechar: the newline character
    :param end_comment: add a block-type comment at each closing END
    :returns: the Mapfile as a string
    """
    rendered = _pprint(d, indent, spacer, quote, newlinechar, end_comment, **kwargs)
    return rendered
2.823073
7.266335
0.388514
def find(lst, key, value):
    """
    Find the first dict in a list of composite dicts (e.g. layers,
    classes) whose *key* equals *value*, or None when no match exists.

    :param lst: a list of composite dictionaries
    :param key: the key name to search (case-insensitive)
    :param value: the value to search for
    :returns: the first matching dict, or None
    """
    lookup = key.lower()
    for item in lst:
        if item[lookup] == value:
            return item
    return None
4.448408
10.54445
0.421872
def findall(lst, key, value):
    """
    Find all dicts in a list of composite dicts (e.g. layers) whose
    *key* is a member of *value*, e.g. all LAYERs whose GROUP is in a
    set of group names.

    :param lst: a list of composite dictionaries
    :param key: the key name to search (case-insensitive)
    :param value: a container of accepted values (membership test)
    :returns: a list of the matching dicts
    """
    lookup = key.lower()
    matches = []
    for item in lst:
        if item[lookup] in value:
            matches.append(item)
    return matches
7.298074
15.512624
0.47046
def findunique(lst, key):
    """
    Find all unique values of *key* for the items in *lst*.

    :param lst: a list of composite dictionaries e.g. layers, classes
    :param key: the key name to collect (case-insensitive)
    :returns: a sorted list of the unique values
    """
    lookup = key.lower()
    unique_values = {item[lookup] for item in lst}
    return sorted(unique_values)
7.293661
15.411687
0.473255
def findkey(d, *keys):
    """
    Get a value from a dictionary based on a list of keys and/or list
    indexes, e.g. findkey(d, "layers", 0, "classes", 0).

    :param d: a Python dictionary
    :param keys: key names and/or list indexes forming the path
    :returns: the object at the path specified by the keys
    """
    # walk the path iteratively instead of recursing
    current = d
    for key in keys:
        current = current[key]
    return current
2.386737
3.887383
0.61397
def update(d1, d2):
    """
    Update dict d1 with properties from d2 and return d1.

    A special ``__delete__`` key allows deletion of objects, and for any
    list of dicts new items can be added when updating.
    """
    NoneType = type(None)
    # a __delete__ flag on the update dict itself removes the whole object
    if d2.get("__delete__", False):
        return {}
    for k, v in d2.items():
        if isinstance(v, dict):
            if v.get("__delete__", False):
                # allow a __delete__ property to be set to delete objects
                del d1[k]
            else:
                # recurse into nested dicts, creating missing targets
                d1[k] = update(d1.get(k, {}), v)
        elif isinstance(v, (tuple, list)) and all(isinstance(li, (NoneType, dict)) for li in v):
            # a list of dicts and/or NoneType
            orig_list = d1.get(k, [])
            new_list = []
            # pair up original and new items; None marks a missing side
            pairs = list(zip_longest(orig_list, v, fillvalue=None))
            for orig_item, new_item in pairs:
                if orig_item is None:
                    orig_item = {}  # can't use {} for fillvalue as only one dict created/modified!
                if new_item is None:
                    new_item = {}
                if new_item.get("__delete__", False):
                    # drop the item from the result list
                    d = None
                    # orig_list.remove(orig_item)  # remove the item to delete
                else:
                    d = update(orig_item, new_item)
                if d is not None:
                    new_list.append(d)
            d1[k] = new_list
        else:
            # scalar values; the string "__delete__" removes the key
            if k in d1 and v == "__delete__":
                del d1[k]
            else:
                d1[k] = v
    return d1
2.800113
2.938981
0.95275
def erosion(mapfile, dilated):
    """
    We will continue to work with the modified Mapfile; if we wanted to
    start from scratch we could simply reread it.
    """
    line_layer = mappyfile.find(mapfile["layers"], "name", "line")
    line_layer["status"] = "OFF"
    polygon_layer = mappyfile.find(mapfile["layers"], "name", "polygon")
    # make a deep copy of the polygon layer in the Map
    # so any modifications are made to this layer only
    new_layer = deepcopy(polygon_layer)
    new_layer["name"] = "newpolygon"
    mapfile["layers"].append(new_layer)
    dilated = dilated.buffer(-0.3)
    new_layer["features"][0]["wkt"] = dilated.wkt
    style = polygon_layer["classes"][0]["styles"][0]
    style["color"] = "#999999"
    style["outlinecolor"] = "#b2b2b2"
5.560046
5.556365
1.000662
def _decode_response(response):
    """
    Strip off Gerrit's magic prefix and decode a response.

    :returns: Decoded JSON content as a dict, or raw text if content
        could not be decoded as JSON.
    :raises: requests.HTTPError if the response contains an HTTP error
        status code.
    """
    content_type = response.headers.get('content-type', '')
    logger.debug("status[%s] content_type[%s] encoding[%s]"
                 % (response.status_code, content_type, response.encoding))
    response.raise_for_status()
    # strip whitespace before decoding so emptiness checks see real content
    content = response.content.strip()
    if response.encoding:
        content = content.decode(response.encoding)
    if not content:
        logger.debug("no content in response")
        return content
    # non-JSON payloads are returned as-is
    if content_type.split(';')[0] != 'application/json':
        return content
    # Gerrit prefixes JSON bodies with a magic string to defeat XSSI
    if content.startswith(GERRIT_MAGIC_JSON_PREFIX):
        content = content[len(GERRIT_MAGIC_JSON_PREFIX):]
    try:
        return json.loads(content)
    except ValueError:
        logger.error('Invalid json content: %s', content)
        raise
2.389728
2.353377
1.015446
def translate_kwargs(self, **kwargs):
    """
    Translate kwargs replacing `data` with `json` if necessary, and
    merge in the default headers.
    """
    merged = self.kwargs.copy()
    merged.update(kwargs)
    if "data" in merged and "json" in merged:
        raise ValueError("Cannot use data and json together")
    # a dict passed as `data` is really a JSON payload
    if "data" in merged and isinstance(merged["data"], dict):
        merged["json"] = merged.pop("data")
    headers = DEFAULT_HEADERS.copy()
    if "headers" in kwargs:
        headers.update(kwargs["headers"])
    if "json" in merged:
        headers.update({"Content-Type": "application/json;charset=UTF-8"})
    merged["headers"] = headers
    return merged
2.133928
1.972212
1.081998
def post(self, endpoint, return_response=False, **kwargs):
    """
    Send HTTP POST to the endpoint.

    :arg str endpoint: The endpoint to send to.
    :returns: JSON decoded result (optionally with the raw response).
    :raises: requests.RequestException on timeout or connection error.
    """
    translated = self.translate_kwargs(**kwargs)
    raw_response = self.session.post(self.make_url(endpoint), **translated)
    decoded = _decode_response(raw_response)
    if return_response:
        return decoded, raw_response
    return decoded
3.267009
3.999497
0.816855
def escape_string(string):
    """
    Escape a string for use in Gerrit commands.

    :arg str string: The string to escape.
    :returns: The string with necessary escapes and surrounding double
        quotes so that it can be passed to any of the Gerrit commands
        that require double-quoted strings.
    """
    # escape backslashes first, then embedded double quotes
    escaped = string.replace('\\', '\\\\').replace('"', '\\"')
    return '"{}"'.format(escaped)
2.339703
3.774832
0.619816
def append(self, data):
    """
    Append the given `data` to the output.

    :arg data: If a list, it is formatted as a bullet list with each
        entry in the list being a separate bullet. Otherwise if it is a
        string, the string is added as a paragraph.
    :raises: ValueError if `data` is not a list or a string.
    """
    if not data:
        return
    if isinstance(data, list):
        # Gerrit creates new bullet items when it gets newline characters
        # within a bullet list paragraph, so flatten each entry onto one
        # line, and normalise any '*' the caller already added (we add our
        # own below).
        cleaned = [entry.replace("\n", " ").strip().lstrip('*').strip()
                   for entry in data]
        # build the bullet list only from entries with remaining text
        bullets = "\n".join("* %s" % entry for entry in cleaned if entry)
        if bullets:
            self.paragraphs.append(bullets)
    elif isinstance(data, str):
        paragraph = data.strip()
        if paragraph:
            self.paragraphs.append(paragraph)
    else:
        raise ValueError('Data must be a list or a string')
6.019132
5.618187
1.071365
message = "" if self.paragraphs: if self.header: message += (self.header + '\n\n') message += "\n\n".join(self.paragraphs) if self.footer: message += ('\n\n' + self.footer) return message
def format(self)
Format the message parts to a string. :Returns: A string of all the message parts separated into paragraphs, with header and footer paragraphs if they were specified in the constructor.
2.960659
2.447896
1.209471
def set_context_params(self, params):
    """
    Check header context parameters for validity.

    Refer to the top of <Zimbra Server-Root>/docs/soap.txt about
    specifics. Should be called by the implementing method to check for
    valid context params before applying them.

    :param params: A Dict containing context parameters.
    :raises: RequestHeaderContextException for an unknown parameter.
    """
    invalid = [key for key in params if key not in self.valid_context_params]
    if invalid:
        raise RequestHeaderContextException(
            "%s is not a valid context parameter." % invalid[0]
        )
6.119194
4.627219
1.322435
def enable_batch(self, onerror="continue"):
    """
    Enable batch request gathering. Do this first and then consecutively
    call "add_request" to add more requests.

    :param onerror: "continue" (default) to respond with soap Faults for
        failed requests, or "stop" processing on the first failure.
    """
    self.batch_request = True
    # requests added to the batch are numbered starting at 1
    self.batch_request_id = 1
    self._create_batch_node(onerror)
9.16512
8.41158
1.089584
def is_fault(self):
    """
    Check whether this response has at least one fault response
    (supports both batch and single responses).
    """
    if self.is_batch():
        # batch responses carry an explicit hasFault flag
        return self.get_batch()['hasFault']
    single_response = self.get_response()
    first_key = list(single_response.keys())[0]
    return first_key == "Fault"
5.894593
4.256788
1.384751
filtered_dict = {} for key, value in response_dict.items(): if key == "_jsns": continue if key == "xmlns": continue if type(value) == list and len(value) == 1: filtered_dict[key] = value[0] elif type(value) == dict and len(value.keys()) == 1 and "_content" \ in value.keys(): filtered_dict[key] = value["_content"] elif type(value) == dict: tmp_dict = self._filter_response(value) filtered_dict[key] = tmp_dict else: filtered_dict[key] = value return filtered_dict
def _filter_response(self, response_dict)
Add additional filters to the response dictionary Currently the response dictionary is filtered like this: * If a list only has one item, the list is replaced by that item * Namespace-Keys (_jsns and xmlns) are removed :param response_dict: the pregenerated, but unfiltered response dict :type response_dict: dict :return: The filtered dictionary :rtype: dict
2.289921
1.943526
1.17823
def create_preauth(byval, key, by='name', expires=0, timestamp=None):
    """
    Generate a zimbra preauth value.

    :param byval: the value of the targeted user (according to *by*),
        e.g. the account name when *by* is "name"
    :param key: the domain preauth key (retrieve with ``zmprov gd``)
    :param by: type of *byval* - "name" (default), "id" or "foreignPrincipal"
    :param expires: milliseconds until the auth token expires (0 = default)
    :param timestamp: current timestamp in ms (calculated by default)
    :returns: the preauth value to be used in an AuthRequest
    :rtype: str
    """
    if timestamp is None:
        # current time in milliseconds
        timestamp = int(datetime.now().strftime("%s")) * 1000
    payload = '%s|%s|%s|%s' % (byval, by, expires, timestamp)
    digest = hmac.new(
        codecs.latin_1_encode(key)[0],
        payload.encode("utf-8"),
        hashlib.sha1
    )
    return digest.hexdigest()
3.338571
3.43956
0.970639
def zimbra_to_python(zimbra_dict, key_attribute="n", content_attribute="_content"):
    """
    Convert single level Zimbra dicts to a standard python dict.

    :param zimbra_dict: the list of entries in Zimbra format
    :return: a native python dict
    """
    return {
        entry[key_attribute]: entry[content_attribute]
        for entry in zimbra_dict
    }
2.621681
3.23822
0.809606
def get_value(haystack, needle, key_attribute="n", content_attribute="_content"):
    """
    Fetch a value from a zimbra-like json dict (keys are "n", values are
    "_content"). May be slightly faster than
    ``zimbra_to_python(haystack)[needle]`` because it stops at the first
    match.

    :param haystack: the list in zimbra-dict format
    :param needle: the key to search for
    :return: the value or None if the key is not found
    """
    return next(
        (entry[content_attribute] for entry in haystack
         if entry[key_attribute] == needle),
        None
    )
2.921154
4.165854
0.701214
def convert_to_str(input_string):
    """
    Return a string of the input, compatible between py2 and py3.

    :param input_string: the value to coerce to the native string type
    :return: ``input_string`` unchanged if it already is a string,
        otherwise ``str(input_string)``
    """
    if sys.version < '3':
        # Python 2: both str and unicode count as strings
        if isinstance(input_string, str) \
                or isinstance(input_string, unicode):  # pragma: no cover py3
            return input_string  # pragma: no cover py3
    else:
        if isinstance(input_string, str):  # pragma: no cover py3
            return input_string  # pragma: no cover py3
    # anything else is converted via str()
    return str(input_string)
2.476967
2.40903
1.028201
def dict_to_dom(root_node, xml_dict):
    """
    Create a DOM node and optionally several subnodes from a dictionary.

    Dict values map as follows: ``_content`` becomes a text node, dicts
    become child elements (recursing), lists become repeated child
    elements, and anything else becomes an attribute.

    :param root_node: DOM-Node the dictionary is applied upon
    :type root_node: xml.dom.Element
    :param xml_dict: The dictionary containing the nodes to process
    :type xml_dict: dict
    """
    if '_content' in list(xml_dict.keys()):
        # text content of this element
        root_node.appendChild(
            root_node.ownerDocument.createTextNode(
                convert_to_str(xml_dict['_content'])
            )
        )
    for key, value in xml_dict.items():
        if key == '_content':
            # already handled above
            continue
        if type(value) == dict:
            # Root node
            tmp_node = root_node.ownerDocument.createElement(key)
            dict_to_dom(tmp_node, value)
            root_node.appendChild(tmp_node)
        elif type(value) == list:
            # one child element per list entry, all with the same tag
            for multinode in value:
                tmp_node = root_node.ownerDocument.createElement(key)
                dict_to_dom(tmp_node, multinode)
                root_node.appendChild(tmp_node)
        else:
            # Attributes
            root_node.setAttribute(
                key,
                convert_to_str(value)
            )
2.014271
1.963413
1.025903
def dom_to_dict(root_node):
    """
    Serialize the given DOM node to the documented dictionary format.

    :param root_node: node to serialize
    :returns: the dictionary
    :rtype: dict
    """
    def strip_ns(tag_name):
        # drop any namespace prefix such as "soap:"
        return tag_name.split(":")[1] if ":" in tag_name else tag_name

    tag = strip_ns(root_node.tagName)
    node_dict = {}
    root_dict = {tag: node_dict}

    # copy element attributes
    if root_node.hasAttributes():
        for attr_name in list(root_node.attributes.keys()):
            node_dict[attr_name] = root_node.getAttribute(attr_name)

    # walk the children
    for child in root_node.childNodes:
        if child.nodeType == root_node.TEXT_NODE:
            # this is the content
            node_dict['_content'] = child.data
        else:
            child_tag = strip_ns(child.tagName)
            child_value = dom_to_dict(child)[child_tag]
            if child_tag in node_dict:
                # several children with the same name collect into a list
                existing = node_dict[child_tag]
                if type(existing) != list:
                    node_dict[child_tag] = [existing]
                node_dict[child_tag].append(child_value)
            else:
                node_dict[child_tag] = child_value
    return root_dict
2.317041
2.401111
0.964987
def connect(self):
    """Overrides HTTPSConnection.connect to specify TLS version."""
    # Standard implementation from HTTPSConnection, which is not
    # designed for extension, unfortunately
    sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
    if getattr(self, '_tunnel_host', None):
        self.sock = sock  # pragma: no cover
        self._tunnel()  # pragma: no cover
    # This is the only difference; default wrap_socket uses SSLv23
    self.sock = ssl.wrap_socket(sock,
                                self.key_file,
                                self.cert_file,
                                ssl_version=ssl.PROTOCOL_TLSv1_2)
3.888808
3.396085
1.145085
if request_type == "json": local_request = RequestJson() elif request_type == "xml": local_request = RequestXml() else: raise UnknownRequestType() if token is not None: local_request.set_auth_token(token) if set_batch: local_request.enable_batch(batch_onerror) return local_request
def gen_request(self, request_type="json", token=None, set_batch=False, batch_onerror=None)
Convenience method to quickly generate a token :param request_type: Type of request (defaults to json) :param token: Authentication token :param set_batch: Also set this request to batch mode? :param batch_onerror: Onerror-parameter for batch mode :return: The request
2.408911
2.711799
0.888307
def send_request(self, request, response=None):
    """
    Send the request and retrieve the results, formatted into a dict or
    a list (when it's a batchresponse).

    :param request: The request to send
    :type request: pythonzimbra.request.Request
    :param response: A prebuilt response object; when omitted, a new one
        matching the request type is created and returned
    :type response: pythonzimbra.response.Response
    :raises: pythonzimbra.exceptions.communication.SoapFailure or
        urllib2.HTTPError on system-side failures. Note: AuthRequest
        raises an HTTPError on failed authentications!
    """
    local_response = None
    if response is None:
        # build a response object matching the request format
        if request.request_type == "json":
            local_response = ResponseJson()
        elif request.request_type == "xml":
            local_response = ResponseXml()
        else:
            raise UnknownRequestType()
    try:
        server_request = ur.urlopen(
            self.url,
            request.get_request().encode("utf-8"),
            self.timeout
        )
        server_response = server_request.read()
        if isinstance(server_response, bytes):
            server_response = server_response.decode("utf-8")
        if response is None:
            local_response.set_response(
                server_response
            )
        else:
            response.set_response(server_response)
    except ue.HTTPError as e:
        if e.code == 500:
            # 500 codes normally returns a SoapFault, that we can use
            server_response = e.fp.read()
            if isinstance(server_response, bytes):
                server_response = server_response.decode("utf-8")
            if response is None:
                local_response.set_response(server_response)
            else:
                response.set_response(server_response)
        else:
            raise e
    # only return a response when the caller did not supply one
    if response is None:
        return local_response
2.593108
2.50856
1.033704
def authenticate(url, account, key, by='name', expires=0, timestamp=None,
                 timeout=None, request_type="xml", admin_auth=False,
                 use_password=False, raise_on_error=False):
    """Authenticate to the Zimbra server.

    :param url: URL of Zimbra SOAP service
    :param account: The account to be authenticated against
    :param key: The preauth key of the domain of the account or a password
        (if admin_auth or use_password is True)
    :param by: If the account is specified as a name, an ID or a
        ForeignPrincipal
    :param expires: When the token expires (or 0 for default expiration)
    :param timestamp: When the token was requested (None for "now")
    :param timeout: Timeout for the communication with the server. Defaults
        to the urllib default
    :param request_type: Which type of request to use ("xml" (default) or
        "json")
    :param admin_auth: This request should authenticate and generate an
        admin token. The "key"-parameter therefore holds the admin password
        (implies use_password)
    :param use_password: The "key"-parameter holds a password. Do a
        password-based user authentication.
    :param raise_on_error: Should I raise an exception when an
        authentication error occurs or just return None?
    :return: The authentication token or None
    :rtype: str or None or unicode
    """
    if timestamp is None:
        # Zimbra expects the preauth timestamp in milliseconds.
        timestamp = int(time.time()) * 1000

    pak = ""
    if not admin_auth:
        # Preauth key is only used for the non-admin preauth flow.
        pak = preauth.create_preauth(account, key, by, expires, timestamp)

    if request_type == 'xml':
        auth_request = RequestXml()
    else:
        auth_request = RequestJson()

    request_data = {
        'account': {
            'by': by,
            '_content': account
        }
    }

    # Admin authentication uses a different namespace and a plain
    # password; user authentication uses either a password or a preauth
    # token, depending on use_password.
    ns = "urn:zimbraAccount"
    if admin_auth:
        ns = "urn:zimbraAdmin"
        request_data['password'] = key
    elif use_password:
        request_data['password'] = {
            "_content": key
        }
    else:
        request_data['preauth'] = {
            'timestamp': timestamp,
            'expires': expires,
            '_content': pak
        }

    auth_request.add_request(
        'AuthRequest',
        request_data,
        ns
    )

    server = Communication(url, timeout)

    # The response flavor must match the request flavor.
    if request_type == 'xml':
        response = ResponseXml()
    else:
        response = ResponseJson()

    server.send_request(auth_request, response)

    if response.is_fault():
        if raise_on_error:
            raise AuthenticationFailed(
                "Cannot authenticate user: (%s) %s" % (
                    response.get_fault_code(),
                    response.get_fault_message()
                )
            )
        # Default behavior: signal failure by returning None.
        return None

    return response.get_response()['AuthResponse']['authToken']
def read_dbf(dbf_path, index=None, cols=False, incl_index=False):
    """Read a dbf file as a pandas.DataFrame, optionally selecting the
    index variable and which columns are to be loaded.

    __author__ = "Dani Arribas-Bel <darribas@asu.edu>"

    Arguments
    ---------
    dbf_path    : str
                  Path to the DBF file to be read
    index       : str
                  Name of the column to be used as the index of the
                  DataFrame
    cols        : list
                  List with the names of the columns to be read into the
                  DataFrame. Defaults to False, which reads the whole dbf
    incl_index  : Boolean
                  If True index is included in the DataFrame as a column
                  too. Defaults to False

    Returns
    -------
    df          : DataFrame
                  pandas.DataFrame object created
    """
    db = ps.open(dbf_path)
    try:
        if cols:
            # Work on a copy: the previous implementation appended the
            # index name to the caller's list in place, which corrupted a
            # ``cols`` list reused across calls.
            vars_to_read = list(cols)
            if incl_index:
                vars_to_read.append(index)
        else:
            vars_to_read = db.header
        data = dict([(var, db.by_col(var)) for var in vars_to_read])
        if index:
            index = db.by_col(index)
            return pd.DataFrame(data, index=index)
        return pd.DataFrame(data)
    finally:
        # Close the dbf handle even when a read fails (the original
        # leaked it on error).
        db.close()
def column_mask(self):
    """ndarray, True where the column margin is below min base size.

    Same shape as the slice, so it can be applied element-wise.
    """
    # Unweighted column margin, with pruned elements compressed out so
    # its shape lines up with the (possibly pruned) slice.
    margin = compress_pruned(
        self._slice.margin(
            axis=0,
            weighted=False,
            include_transforms_for_dims=self._hs_dims,
            prune=self._prune,
        )
    )
    # Flag columns whose base falls under the minimum size threshold.
    mask = margin < self._size

    if margin.shape == self._shape:
        # If margin shape is the same as slice's (such as in a col margin for
        # MR x CAT), don't broadcast the mask to the array shape, since
        # they're already the same.
        return mask

    # If the row margin is a row vector - broadcast it's mask to the array shape
    return np.logical_or(np.zeros(self._shape, dtype=bool), mask)