| signature | body | docstring | id |
|---|---|---|---|
def send_change_notification(hub, topic_url, updated_content=None):
|
if updated_content:<EOL><INDENT>body = base64.b64decode(updated_content['<STR_LIT:content>'])<EOL><DEDENT>else:<EOL><INDENT>body, updated_content = get_new_content(hub.config, topic_url)<EOL><DEDENT>b64_body = updated_content['<STR_LIT:content>']<EOL>headers = updated_content['<STR_LIT>']<EOL>link_header = headers.get('<STR_LIT>', '<STR_LIT>')<EOL>if '<STR_LIT>' not in link_header or '<STR_LIT>' not in link_header:<EOL><INDENT>raise NotificationError(INVALID_LINK)<EOL><DEDENT>for callback_url, secret in hub.storage.get_callbacks(topic_url):<EOL><INDENT>schedule_request(hub, topic_url, callback_url, secret, body, b64_body,<EOL>headers)<EOL><DEDENT>
|
7. Content Distribution
|
f11351:m0
|
def subscribe(hub, callback_url, topic_url, lease_seconds, secret,<EOL>endpoint_hook_data):
|
for validate in hub.validators:<EOL><INDENT>error = validate(callback_url, topic_url, lease_seconds, secret,<EOL>endpoint_hook_data)<EOL>if error:<EOL><INDENT>send_denied(hub, callback_url, topic_url, error)<EOL>return<EOL><DEDENT><DEDENT>if intent_verified(hub, callback_url, '<STR_LIT>', topic_url,<EOL>lease_seconds):<EOL><INDENT>hub.storage[topic_url, callback_url] = {<EOL>'<STR_LIT>': lease_seconds,<EOL>'<STR_LIT>': secret,<EOL>}<EOL><DEDENT>
|
5.2 Subscription Validation
|
f11351:m4
|
def intent_verified(hub, callback_url, mode, topic_url, lease_seconds):
|
challenge = uuid4()<EOL>params = {<EOL>'<STR_LIT>': mode,<EOL>'<STR_LIT>': topic_url,<EOL>'<STR_LIT>': challenge,<EOL>'<STR_LIT>': lease_seconds,<EOL>}<EOL>try:<EOL><INDENT>response = request_url(hub.config, '<STR_LIT:GET>', callback_url, params=params)<EOL>assert response.status_code == <NUM_LIT:200> and response.text == challenge<EOL><DEDENT>except requests.exceptions.RequestException as e:<EOL><INDENT>warn("<STR_LIT>", e)<EOL><DEDENT>except AssertionError as e:<EOL><INDENT>warn(INTENT_UNVERIFIED % (response.status_code, response.content), e)<EOL><DEDENT>else:<EOL><INDENT>return True<EOL><DEDENT>return False<EOL>
|
5.3 Hub Verifies Intent of the Subscriber
|
f11351:m6
|
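The `intent_verified` body above sends a GET to the callback with a random challenge and treats the subscription as verified only if the callback answers 200 and echoes the challenge back. A minimal sketch of the subscriber side of that exchange, assuming the WebSub parameter names (e.g. `hub.challenge`), which are elided as `<STR_LIT>` in these rows:

```python
from urllib.parse import parse_qs, urlparse

def handle_verification(request_url):
    """Subscriber-side sketch: echo hub.challenge with a 200 status.

    The 'hub.challenge' parameter name follows the WebSub spec and is an
    assumption here; the hub's intent_verified() only returns True when the
    response status is 200 and the body equals the challenge it sent.
    """
    params = parse_qs(urlparse(request_url).query)
    challenge = params.get('hub.challenge', [''])[0]
    return 200, challenge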
def __init__(self, path):
|
self.path = path<EOL>with self.connection() as connection:<EOL><INDENT>connection.execute(self.TABLE_SETUP_SQL)<EOL><DEDENT>
|
Path should be where you want to save the sqlite3 database.
|
f11353:c0:m0
|
def __init__(self, client_id=None, client_secret=None, access_token=None, redirect_url=None):
|
<EOL>if (not client_id or not client_secret) and not access_token:<EOL><INDENT>error_message = '<STR_LIT>'<EOL>logging.error(error_message)<EOL>raise UntappdException(error_message)<EOL><DEDENT>self.requester = self.Requester(client_id, client_secret, access_token)<EOL>self.oauth = self.OAuth(self.requester, client_id, client_secret, redirect_url)<EOL>self._attach_endpoints()<EOL>
|
Sets up the API client object
|
f11358:c2:m0
|
def _attach_endpoints(self):
|
for name, value in inspect.getmembers(self):<EOL><INDENT>if inspect.isclass(value) and issubclass(value, self._Endpoint) and (value is not self._Endpoint):<EOL><INDENT>endpoint_instance = value(self.requester)<EOL>setattr(self, endpoint_instance.endpoint_base, endpoint_instance)<EOL>if not hasattr(endpoint_instance, '<STR_LIT>'):<EOL><INDENT>endpoint_instance.get_endpoints = ()<EOL><DEDENT>if not hasattr(endpoint_instance, '<STR_LIT>'):<EOL><INDENT>endpoint_instance.post_endpoints = ()<EOL><DEDENT>if not hasattr(endpoint_instance, '<STR_LIT>'):<EOL><INDENT>endpoint_instance.is_callable = False<EOL><DEDENT>for endpoint in (endpoint_instance.get_endpoints + endpoint_instance.post_endpoints):<EOL><INDENT>function = endpoint_instance.create_endpoint_function(endpoint)<EOL>function_name = endpoint.replace('<STR_LIT:/>', '<STR_LIT:_>')<EOL>setattr(endpoint_instance, function_name, function)<EOL>function.__name__ = str(function_name)<EOL>function.__doc__ = '<STR_LIT>'.format(endpoint)<EOL><DEDENT><DEDENT><DEDENT>
|
Dynamically attaches endpoint callables to this client
|
f11358:c2:m1
|
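`_attach_endpoints` above is a dynamic-attachment pattern: it inspects the client for inner `_Endpoint` subclasses, instantiates each one, and turns every declared endpoint path into a callable whose name is the path with `/` replaced by `_`. A self-contained sketch of the same pattern, with hypothetical names (`Client`, `Beer`, `'info'`) since the real endpoint strings are elided here:

```python
import inspect

class Client:
    """Sketch of the dynamic endpoint-attachment pattern (hypothetical names)."""

    class _Endpoint:
        endpoint_base = 'base'
        get_endpoints = ()
        post_endpoints = ()

        def __init__(self, requester):
            self.requester = requester

        def create_endpoint_function(self, endpoint):
            def call(**params):
                # Stand-in for the HTTP request the real client would perform.
                return (endpoint, params)
            return call

    class Beer(_Endpoint):
        endpoint_base = 'beer'
        get_endpoints = ('info', 'checkins')

    def __init__(self, requester=None):
        self.requester = requester
        for name, value in inspect.getmembers(self):
            if inspect.isclass(value) and issubclass(value, self._Endpoint) and value is not self._Endpoint:
                instance = value(self.requester)
                setattr(self, instance.endpoint_base, instance)
                for endpoint in instance.get_endpoints + instance.post_endpoints:
                    # e.g. 'info' becomes client.beer.info()
                    setattr(instance, endpoint.replace('/', '_'), instance.create_endpoint_function(endpoint))

client = Client()
print(client.beer.info())  # ('info', {})
```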
def set_access_token(self, access_token):
|
self.requester.set_access_token(access_token)<EOL>
|
Updates the access token to use
|
f11358:c2:m2
|
def show():
|
_runapp(_global_config)<EOL>
|
Launch geoplotlib
|
f11377:m1
|
def savefig(fname):
|
_global_config.savefig = fname<EOL>_runapp(_global_config)<EOL>
|
Launch geoplotlib, save a screenshot, and terminate
|
f11377:m2
|
def inline(width=<NUM_LIT>):
|
from IPython.display import Image, HTML, display, clear_output<EOL>import random<EOL>import string<EOL>import urllib<EOL>import os<EOL>while True:<EOL><INDENT>fname = '<STR_LIT>'.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(<NUM_LIT:32>))<EOL>if not os.path.isfile(fname + '<STR_LIT>'):<EOL><INDENT>break<EOL><DEDENT><DEDENT>savefig(fname)<EOL>if os.path.isfile(fname + '<STR_LIT>'):<EOL><INDENT>with open(fname + '<STR_LIT>', '<STR_LIT:rb>') as fin:<EOL><INDENT>encoded = base64.b64encode(fin.read())<EOL>b64 = urllib.parse.quote(encoded)<EOL><DEDENT>image_html = "<STR_LIT>" % (width, b64)<EOL>display(HTML(image_html))<EOL>os.remove(fname + '<STR_LIT>')<EOL><DEDENT>
|
Display the map inline in IPython
:param width: image width for the browser
|
f11377:m3
|
def dot(data, color=None, point_size=<NUM_LIT:2>, f_tooltip=None):
|
from geoplotlib.layers import DotDensityLayer<EOL>_global_config.layers.append(DotDensityLayer(data, color=color, point_size=point_size, f_tooltip=f_tooltip))<EOL>
|
Create a dot density map
:param data: data access object
:param color: color
:param point_size: point size
:param f_tooltip: function to return a tooltip string for a point
|
f11377:m4
|
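Taken together, the module-level helpers in these rows give the usual geoplotlib workflow: load a data access object, append a layer, then launch the window. A minimal sketch, assuming a CSV with the customary `lat`/`lon` columns (the filename is a placeholder):

```python
import geoplotlib
from geoplotlib.utils import read_csv

# 'points.csv' is a placeholder; read_csv() returns a DataAccessObject
# and the dot-density layer expects 'lat'/'lon' columns in it.
data = read_csv('points.csv')
geoplotlib.dot(data, point_size=3)
geoplotlib.show()
```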
def scatter(data, color=None, point_size=<NUM_LIT:2>, f_tooltip=None):
|
import warnings<EOL>warnings.warn("<STR_LIT>", DeprecationWarning)<EOL>dot(data, color, point_size, f_tooltip)<EOL>
|
Deprecated: use dot
|
f11377:m5
|
def hist(data, cmap='<STR_LIT>', alpha=<NUM_LIT>, colorscale='<STR_LIT>', binsize=<NUM_LIT:16>, show_tooltip=False,<EOL>scalemin=<NUM_LIT:0>, scalemax=None, f_group=None, show_colorbar=True):
|
from geoplotlib.layers import HistogramLayer<EOL>_global_config.layers.append(HistogramLayer(data, cmap=cmap, alpha=alpha, colorscale=colorscale,<EOL>binsize=binsize, show_tooltip=show_tooltip, scalemin=scalemin, <EOL>scalemax=scalemax, f_group=f_group, show_colorbar=show_colorbar))<EOL>
|
Create a 2D histogram
:param data: data access object
:param cmap: colormap name
:param alpha: color alpha
:param colorscale: scaling [lin, log, sqrt]
:param binsize: size of the hist bins
:param show_tooltip: if True, will show the value of bins on mouseover
:param scalemin: min value for displaying a bin
:param scalemax: max value for a bin
:param f_group: function to apply to samples in the same bin. Default is to count
:param show_colorbar: show colorbar
|
f11377:m6
|
def graph(data, src_lat, src_lon, dest_lat, dest_lon, linewidth=<NUM_LIT:1>, alpha=<NUM_LIT>, color='<STR_LIT>'):
|
from geoplotlib.layers import GraphLayer<EOL>_global_config.layers.append(GraphLayer(data, src_lat, src_lon, dest_lat, dest_lon, linewidth, alpha, color))<EOL>
|
Create a graph drawing a line between each pair of (src_lat, src_lon) and (dest_lat, dest_lon)
:param data: data access object
:param src_lat: field name of source latitude
:param src_lon: field name of source longitude
:param dest_lat: field name of destination latitude
:param dest_lon: field name of destination longitude
:param linewidth: line width
:param alpha: color alpha
:param color: color or colormap
|
f11377:m7
|
def shapefiles(fname, f_tooltip=None, color=None, linewidth=<NUM_LIT:3>, shape_type='<STR_LIT>'):
|
from geoplotlib.layers import ShapefileLayer<EOL>_global_config.layers.append(ShapefileLayer(fname, f_tooltip, color, linewidth, shape_type))<EOL>
|
Load and draw shapefiles
:param fname: full path to the shapefile
:param f_tooltip: function to generate a tooltip on mouseover
:param color: color
:param linewidth: line width
:param shape_type: either full or bbox
|
f11377:m8
|
def voronoi(data, line_color=None, line_width=<NUM_LIT:2>, f_tooltip=None, cmap=None, max_area=<NUM_LIT>, alpha=<NUM_LIT>):
|
from geoplotlib.layers import VoronoiLayer<EOL>_global_config.layers.append(VoronoiLayer(data, line_color, line_width, f_tooltip, cmap, max_area, alpha))<EOL>
|
Draw the Voronoi tessellation of the points
:param data: data access object
:param line_color: line color
:param line_width: line width
:param f_tooltip: function to generate a tooltip on mouseover
:param cmap: color map
:param max_area: scaling constant to determine the color of the voronoi areas
:param alpha: color alpha
|
f11377:m9
|
def delaunay(data, line_color=None, line_width=<NUM_LIT:2>, cmap=None, max_lenght=<NUM_LIT:100>):
|
from geoplotlib.layers import DelaunayLayer<EOL>_global_config.layers.append(DelaunayLayer(data, line_color, line_width, cmap, max_lenght))<EOL>
|
Draw a Delaunay triangulation of the points
:param data: data access object
:param line_color: line color
:param line_width: line width
:param cmap: color map
:param max_lenght: scaling constant for coloring the edges
|
f11377:m10
|
def convexhull(data, col, fill=True, point_size=<NUM_LIT:4>):
|
from geoplotlib.layers import ConvexHullLayer<EOL>_global_config.layers.append(ConvexHullLayer(data, col, fill, point_size))<EOL>
|
Convex hull for a set of points
:param data: points
:param col: color
:param fill: whether to fill the convexhull polygon or not
:param point_size: size of the points on the convexhull. Points are not rendered if None
|
f11377:m11
|
def kde(data, bw, cmap='<STR_LIT>', method='<STR_LIT>', scaling='<STR_LIT>', alpha=<NUM_LIT>,<EOL>cut_below=None, clip_above=None, binsize=<NUM_LIT:1>, cmap_levels=<NUM_LIT:10>, show_colorbar=False):
|
from geoplotlib.layers import KDELayer<EOL>_global_config.layers.append(KDELayer(data, bw, cmap, method, scaling, alpha,<EOL>cut_below, clip_above, binsize, cmap_levels, show_colorbar))<EOL>
|
Kernel density estimation visualization
:param data: data access object
:param bw: kernel bandwidth (in screen coordinates)
:param cmap: colormap
:param method: if kde, use KDEMultivariate from statsmodels, which provides a more accurate but much slower estimation.
If hist, estimate the density by applying Gaussian smoothing to a 2D histogram, which is much faster but less accurate
:param scaling: colorscale, lin log or sqrt
:param alpha: color alpha
:param cut_below: densities below cut_below are not drawn
:param clip_above: defines the max value for the colorscale
:param binsize: size of the bins for hist estimator
:param cmap_levels: discretize colors into cmap_levels levels
:param show_colorbar: show colorbar
|
f11377:m12
|
def markers(data, marker, f_tooltip=None, marker_preferred_size=<NUM_LIT:32>):
|
from geoplotlib.layers import MarkersLayer<EOL>_global_config.layers.append(MarkersLayer(data, marker, f_tooltip, marker_preferred_size))<EOL>
|
Draw markers
:param data: data access object
:param marker: full filename of the marker image
:param f_tooltip: function to generate a tooltip on mouseover
:param marker_preferred_size: size in pixel for the marker images
|
f11377:m13
|
def geojson(filename, color='<STR_LIT:b>', linewidth=<NUM_LIT:1>, fill=False, f_tooltip=None):
|
from geoplotlib.layers import GeoJSONLayer<EOL>_global_config.layers.append(GeoJSONLayer(filename, color=color, linewidth=linewidth, fill=fill, f_tooltip=f_tooltip))<EOL>
|
Draw features described in geojson format (http://geojson.org/)
:param filename: filename of the geojson file
:param color: color for the shapes. If callable, it will be invoked for each feature, passing the properties element
:param linewidth: line width
:param fill: if fill=True the feature polygon is filled, otherwise just the border is rendered
:param f_tooltip: function to generate a tooltip on mouseover. It will be invoked for each feature, passing the properties element
|
f11377:m14
|
def labels(data, label_column, color=None, font_name=FONT_NAME, <EOL>font_size=<NUM_LIT>, anchor_x='<STR_LIT:left>', anchor_y='<STR_LIT>'):
|
from geoplotlib.layers import LabelsLayer<EOL>_global_config.layers.append(LabelsLayer(data, label_column, color, font_name, <EOL>font_size, anchor_x, anchor_y))<EOL>
|
Draw a text label for each sample
:param data: data access object
:param label_column: column in the data access object where the labels text is stored
:param color: color
:param font_name: font name
:param font_size: font size
:param anchor_x: anchor x
:param anchor_y: anchor y
|
f11377:m15
|
def grid(lon_edges, lat_edges, values, cmap, alpha=<NUM_LIT:255>, vmin=None, vmax=None, levels=<NUM_LIT:10>, colormap_scale='<STR_LIT>', show_colorbar=True):
|
from geoplotlib.layers import GridLayer<EOL>_global_config.layers.append(<EOL>GridLayer(lon_edges, lat_edges, values, cmap, alpha, vmin, vmax, levels, colormap_scale, show_colorbar))<EOL>
|
Values on a uniform grid
:param lon_edges: longitude edges
:param lat_edges: latitude edges
:param values: matrix representing values on the grid
:param cmap: colormap name
:param alpha: color alpha
:param vmin: minimum value for the colormap
:param vmax: maximum value for the colormap
:param levels: number of levels for the colormap
:param colormap_scale: colormap scale
:param show_colorbar: show the colorbar in the UI
|
f11377:m16
|
def clear():
|
_global_config.layers = []<EOL>
|
Remove all existing layers
|
f11377:m17
|
def tiles_provider(tiles_provider):
|
_global_config.tiles_provider = tiles_provider<EOL>
|
Set the tile provider
:param tiles_provider: either one of the built-in providers
['watercolor', 'toner', 'toner-lite', 'mapquest', 'darkmatter','positron']
or a custom provider in the form
{'url': lambda zoom, xtile, ytile: 'someurl' % (zoom, xtile, ytile),
'tiles_dir': 'mytiles',
'attribution': 'my attribution'
})
|
f11377:m18
|
def add_layer(layer):
|
_global_config.layers.append(layer)<EOL>
|
Add a layer
:param layer: a BaseLayer object
|
f11377:m19
|
def set_bbox(bbox):
|
_global_config.bbox = bbox<EOL>
|
Set the map bounding box
:param bbox: a BoundingBox object
|
f11377:m20
|
def set_smoothing(smoothing):
|
_global_config.smoothing = smoothing<EOL>
|
Enables OpenGL line smoothing (antialiasing)
:param smoothing: smoothing enabled or disabled
|
f11377:m21
|
def set_map_alpha(alpha):
|
if alpha < <NUM_LIT:0> or alpha > <NUM_LIT:255>:<EOL><INDENT>raise Exception('<STR_LIT>' + str(alpha))<EOL><DEDENT>_global_config.map_alpha = alpha<EOL>
|
Alpha color of the map tiles
:param alpha: int between 0 and 255. 0 is completely dark, 255 is full brightness
|
f11377:m22
|
def set_window_size(w, h):
|
_global_config.screen_w = w<EOL>_global_config.screen_h = h<EOL>
|
Set the geoplotlib window size
:param w: window width
:param h: window height
|
f11377:m23
|
def invalidate(self, proj):
|
pass<EOL>
|
This method is called each time layers need to be redrawn, i.e. on zoom.
Typically in this method a BatchPainter is instantiated and all the rendering is performed
:param proj: the current Projector object
|
f11378:c1:m0
|
def draw(self, proj, mouse_x, mouse_y, ui_manager):
|
pass<EOL>
|
This method is called at every frame, and typically executes BatchPainter.batch_draw()
:param proj: the current Projector object
:param mouse_x: mouse x
:param mouse_y: mouse y
:param ui_manager: the current UiManager
|
f11378:c1:m1
|
def bbox(self):
|
return BoundingBox.WORLD<EOL>
|
Return the bounding box for this layer
|
f11378:c1:m2
|
def on_key_release(self, key, modifiers):
|
return False<EOL>
|
Override this method for custom handling of keystrokes
:param key: the key that has been released
:param modifiers: the key modifiers
:return: True if the layer needs to call invalidate
|
f11378:c1:m3
|
def __init__(self, data, color=None, point_size=<NUM_LIT:2>, f_tooltip=None):
|
self.data = data<EOL>self.color = color<EOL>if self.color is None:<EOL><INDENT>self.color = [<NUM_LIT:255>,<NUM_LIT:0>,<NUM_LIT:0>]<EOL><DEDENT>self.point_size = point_size<EOL>self.f_tooltip = f_tooltip<EOL>self.hotspots = HotspotManager()<EOL>
|
Create a dot density map
:param data: data access object
:param color: color
:param point_size: point size
:param f_tooltip: function to return a tooltip string for a point
|
f11378:c2:m0
|
def __init__(self, data, cmap='<STR_LIT>', alpha=<NUM_LIT>, colorscale='<STR_LIT>', binsize=<NUM_LIT:16>, <EOL>show_tooltip=False, scalemin=<NUM_LIT:0>, scalemax=None, f_group=None, show_colorbar=True):
|
self.data = data<EOL>self.cmap = colors.ColorMap(cmap, alpha=alpha)<EOL>self.binsize = binsize<EOL>self.show_tooltip = show_tooltip<EOL>self.scalemin = scalemin<EOL>self.scalemax = scalemax<EOL>self.colorscale = colorscale<EOL>self.f_group = f_group<EOL>if self.f_group is None:<EOL><INDENT>self.f_group = lambda grp: len(grp)<EOL><DEDENT>self.show_colorbar = show_colorbar<EOL>
|
Create a 2D histogram
:param data: data access object
:param cmap: colormap name
:param alpha: color alpha
:param colorscale: scaling [lin, log, sqrt]
:param binsize: size of the hist bins
:param show_tooltip: if True, will show the value of bins on mouseover
:param scalemin: min value for displaying a bin
:param scalemax: max value for a bin
:param f_group: function to apply to samples in the same bin. Default is to count
:param show_colorbar: show colorbar
:return:
|
f11378:c3:m0
|
def __init__(self, data, src_lat, src_lon, dest_lat, dest_lon, linewidth=<NUM_LIT:1>, alpha=<NUM_LIT>, color='<STR_LIT>'):
|
self.data = data<EOL>self.src_lon = src_lon<EOL>self.src_lat = src_lat<EOL>self.dest_lon = dest_lon<EOL>self.dest_lat = dest_lat<EOL>self.linewidth = linewidth<EOL>alpha = alpha<EOL>self.color = color<EOL>if type(self.color) == str:<EOL><INDENT>self.cmap = colors.ColorMap(self.color, alpha)<EOL><DEDENT>
|
Create a graph drawing a line between each pair of (src_lat, src_lon) and (dest_lat, dest_lon)
:param data: data access object
:param src_lat: field name of source latitude
:param src_lon: field name of source longitude
:param dest_lat: field name of destination latitude
:param dest_lon: field name of destination longitude
:param linewidth: line width
:param alpha: color alpha
:param color: color or colormap
|
f11378:c4:m0
|
def __init__(self, fname, f_tooltip=None, color=None, linewidth=<NUM_LIT:3>, shape_type='<STR_LIT>'):
|
if color is None:<EOL><INDENT>color = [<NUM_LIT:255>, <NUM_LIT:0>, <NUM_LIT:0>]<EOL><DEDENT>self.color = color<EOL>self.linewidth = linewidth<EOL>self.f_tooltip = f_tooltip<EOL>self.shape_type = shape_type<EOL>try:<EOL><INDENT>import shapefile<EOL><DEDENT>except:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>self.reader = shapefile.Reader(fname)<EOL>self.worker = None<EOL>self.queue = Queue.Queue()<EOL>
|
Loads and draws shapefiles
:param fname: full path to the shapefile
:param f_tooltip: function to generate a tooltip on mouseover
:param color: color
:param linewidth: line width
:param shape_type: either full or bbox
|
f11378:c5:m0
|
def __init__(self, data, line_color=None, line_width=<NUM_LIT:2>, cmap=None, max_lenght=<NUM_LIT:100>):
|
self.data = data<EOL>if cmap is None and line_color is None:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>if cmap is not None:<EOL><INDENT>cmap = colors.ColorMap(cmap, alpha=<NUM_LIT>)<EOL><DEDENT>self.cmap = cmap<EOL>self.line_color = line_color<EOL>self.line_width = line_width<EOL>self.max_lenght = max_lenght<EOL>
|
Draw a Delaunay triangulation of the points
:param data: data access object
:param line_color: line color
:param line_width: line width
:param cmap: color map
:param max_lenght: scaling constant for coloring the edges
|
f11378:c7:m0
|
def __init__(self, data, line_color=None, line_width=<NUM_LIT:2>, f_tooltip=None, cmap=None, max_area=<NUM_LIT>, alpha=<NUM_LIT>):
|
self.data = data<EOL>if cmap is None and line_color is None:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>if cmap is not None:<EOL><INDENT>cmap = colors.ColorMap(cmap, alpha=alpha, levels=<NUM_LIT:10>)<EOL><DEDENT>self.cmap = cmap<EOL>self.line_color = line_color<EOL>self.line_width = line_width<EOL>self.f_tooltip = f_tooltip<EOL>self.max_area = max_area<EOL>
|
Draw the Voronoi tessellation of the points from data
:param data: data access object
:param line_color: line color
:param line_width: line width
:param f_tooltip: function to generate a tooltip on mouseover
:param cmap: color map
:param max_area: scaling constant to determine the color of the voronoi areas
:param alpha: color alpha
:return:
|
f11378:c8:m0
|
@staticmethod<EOL><INDENT>def __voronoi_finite_polygons_2d(vor, radius=None):<DEDENT>
|
if vor.points.shape[<NUM_LIT:1>] != <NUM_LIT:2>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>new_regions = []<EOL>new_vertices = vor.vertices.tolist()<EOL>center = vor.points.mean(axis=<NUM_LIT:0>)<EOL>if radius is None:<EOL><INDENT>radius = vor.points.ptp().max()<EOL><DEDENT>all_ridges = {}<EOL>for (p1, p2), (v1, v2) in zip(vor.ridge_points, vor.ridge_vertices):<EOL><INDENT>all_ridges.setdefault(p1, []).append((p2, v1, v2))<EOL>all_ridges.setdefault(p2, []).append((p1, v1, v2))<EOL><DEDENT>for p1, region in enumerate(vor.point_region):<EOL><INDENT>vertices = vor.regions[region]<EOL>if all(v >= <NUM_LIT:0> for v in vertices):<EOL><INDENT>new_regions.append(vertices)<EOL>continue<EOL><DEDENT>if p1 not in all_ridges:<EOL><INDENT>continue<EOL><DEDENT>ridges = all_ridges[p1]<EOL>new_region = [v for v in vertices if v >= <NUM_LIT:0>]<EOL>for p2, v1, v2 in ridges:<EOL><INDENT>if v2 < <NUM_LIT:0>:<EOL><INDENT>v1, v2 = v2, v1<EOL><DEDENT>if v1 >= <NUM_LIT:0>:<EOL><INDENT>continue<EOL><DEDENT>t = vor.points[p2] - vor.points[p1] <EOL>t /= np.linalg.norm(t)<EOL>n = np.array([-t[<NUM_LIT:1>], t[<NUM_LIT:0>]]) <EOL>midpoint = vor.points[[p1, p2]].mean(axis=<NUM_LIT:0>)<EOL>direction = np.sign(np.dot(midpoint - center, n)) * n<EOL>far_point = vor.vertices[v2] + direction * radius<EOL>new_region.append(len(new_vertices))<EOL>new_vertices.append(far_point.tolist())<EOL><DEDENT>vs = np.asarray([new_vertices[v] for v in new_region])<EOL>c = vs.mean(axis=<NUM_LIT:0>)<EOL>angles = np.arctan2(vs[:,<NUM_LIT:1>] - c[<NUM_LIT:1>], vs[:,<NUM_LIT:0>] - c[<NUM_LIT:0>])<EOL>new_region = np.array(new_region)[np.argsort(angles)]<EOL>new_regions.append(new_region.tolist())<EOL><DEDENT>return new_regions, np.asarray(new_vertices)<EOL>
|
Reconstruct infinite voronoi regions in a 2D diagram to finite
regions.
Parameters
----------
vor : Voronoi
Input diagram
radius : float, optional
Distance to 'points at infinity'.
Returns
-------
regions : list of tuples
Indices of vertices in each revised Voronoi region.
vertices : list of tuples
Coordinates for revised Voronoi vertices. Same as coordinates
of input vertices, with 'points at infinity' appended to the
end.
|
f11378:c8:m1
|
def __init__(self, data, marker, f_tooltip=None, marker_preferred_size=<NUM_LIT:32>):
|
self.data = data<EOL>self.f_tooltip = f_tooltip<EOL>self.marker_preferred_size = float(marker_preferred_size)<EOL>self.marker = pyglet.image.load(marker)<EOL>self.marker.anchor_x = self.marker.width / <NUM_LIT:2><EOL>self.marker.anchor_y = self.marker.height / <NUM_LIT:2><EOL>self.scale = self.marker_preferred_size / max(self.marker.width, self.marker.height)<EOL>self.hotspots = HotspotManager()<EOL>
|
Draw markers
:param data: data access object
:param marker: full filename of the marker image
:param f_tooltip: function to generate a tooltip on mouseover
:param marker_preferred_size: size in pixel for the marker images
|
f11378:c9:m0
|
def __init__(self, values, bw, cmap='<STR_LIT>', method='<STR_LIT>', scaling='<STR_LIT>', alpha=<NUM_LIT>,<EOL>cut_below=None, clip_above=None, binsize=<NUM_LIT:1>, cmap_levels=<NUM_LIT:10>, show_colorbar=False):
|
self.values = values<EOL>self.bw = bw<EOL>self.cmap = colors.ColorMap(cmap, alpha=alpha, levels=cmap_levels)<EOL>self.method = method<EOL>self.scaling = scaling<EOL>self.cut_below = cut_below<EOL>self.clip_above = clip_above<EOL>self.binsize = binsize<EOL>self.show_colorbar = show_colorbar<EOL>
|
Kernel density estimation visualization
:param values: data access object
:param bw: kernel bandwidth (in screen coordinates)
:param cmap: colormap
:param method: if kde, use KDEMultivariate from statsmodels, which provides a more accurate but much slower estimation.
If hist, estimate the density by applying Gaussian smoothing to a 2D histogram, which is much faster but less accurate
:param scaling: colorscale, lin log or sqrt
:param alpha: color alpha
:param cut_below: densities below cut_below are not drawn
:param clip_above: defines the max value for the colorscale
:param binsize: size of the bins for hist estimator
:param cmap_levels: discretize colors into cmap_levels
:param show_colorbar: show colorbar
|
f11378:c10:m0
|
def __init__(self, data, col, fill=True, point_size=<NUM_LIT:4>):
|
self.data = data<EOL>self.col = col<EOL>self.fill = fill<EOL>self.point_size=point_size<EOL>
|
Convex hull for a set of points
:param data: points
:param col: color
:param fill: whether to fill the convexhull polygon or not
:param point_size: size of the points on the convexhull. Points are not rendered if None
|
f11378:c11:m0
|
def __init__(self, lon_edges, lat_edges, values, cmap, alpha=<NUM_LIT:255>, vmin=None, vmax=None, levels=<NUM_LIT:10>, <EOL>colormap_scale='<STR_LIT>', show_colorbar=True):
|
self.lon_edges = lon_edges<EOL>self.lat_edges = lat_edges<EOL>self.values = values<EOL>self.cmap = colors.ColorMap(cmap, alpha=alpha, levels=levels)<EOL>self.colormap_scale = colormap_scale<EOL>self.show_colorbar = show_colorbar<EOL>if vmin:<EOL><INDENT>self.vmin = vmin<EOL><DEDENT>else:<EOL><INDENT>self.vmin = <NUM_LIT:0><EOL><DEDENT>if vmax:<EOL><INDENT>self.vmax = vmax<EOL><DEDENT>else:<EOL><INDENT>self.vmax = self.values[~np.isnan(self.values)].max()<EOL><DEDENT>
|
Values over a uniform grid
:param lon_edges: longitude edges
:param lat_edges: latitude edges
:param values: matrix representing values on the grid
:param cmap: colormap name
:param alpha: color alpha
:param vmin: minimum value for the colormap
:param vmax: maximum value for the colormap
:param levels: number of levels for the colormap
:param colormap_scale: colormap scale
:param show_colorbar: show the colorbar in the UI
|
f11378:c12:m0
|
def __init__(self, data, label_column, color=None, font_name=FONT_NAME, font_size=<NUM_LIT>, anchor_x='<STR_LIT:left>', anchor_y='<STR_LIT>'):
|
self.data = data<EOL>self.label_column = label_column<EOL>self.color = color<EOL>self.font_name = font_name<EOL>self.font_size = font_size<EOL>self.anchor_x = anchor_x<EOL>self.anchor_y = anchor_y<EOL>if self.color is None:<EOL><INDENT>self.color = [<NUM_LIT:255>,<NUM_LIT:0>,<NUM_LIT:0>]<EOL><DEDENT>
|
Create a layer with a text label for each sample
:param data: data access object
:param label_column: column in the data access object where the labels text is stored
:param color: color
:param font_name: font name
:param font_size: font size
:param anchor_x: anchor x
:param anchor_y: anchor y
|
f11378:c14:m0
|
def __generate_spline(self, x, y, closed=False, steps=<NUM_LIT:20>):
|
if closed:<EOL><INDENT>x = x.tolist()<EOL>x.insert(<NUM_LIT:0>, x[-<NUM_LIT:1>])<EOL>x.append(x[<NUM_LIT:1>])<EOL>x.append(x[<NUM_LIT:2>])<EOL>y = y.tolist()<EOL>y.insert(<NUM_LIT:0>, y[-<NUM_LIT:1>])<EOL>y.append(y[<NUM_LIT:1>])<EOL>y.append(y[<NUM_LIT:2>])<EOL><DEDENT>points = np.vstack((x,y)).T<EOL>curve = []<EOL>if not closed:<EOL><INDENT>curve.append(points[<NUM_LIT:0>])<EOL><DEDENT>for j in range(<NUM_LIT:1>, len(points)-<NUM_LIT:2>):<EOL><INDENT>for s in range(steps):<EOL><INDENT>t = <NUM_LIT:1.> * s / steps<EOL>p0, p1, p2, p3 = points[j-<NUM_LIT:1>], points[j], points[j+<NUM_LIT:1>], points[j+<NUM_LIT:2>]<EOL>pnew = <NUM_LIT:0.5> *((<NUM_LIT:2> * p1) + (-p0 + p2) * t + (<NUM_LIT:2>*p0 - <NUM_LIT:5>*p1 + <NUM_LIT:4>*p2 - p3) * t**<NUM_LIT:2> + (-p0 + <NUM_LIT:3>*p1- <NUM_LIT:3>*p2 + p3) * t**<NUM_LIT:3>)<EOL>curve.append(pnew)<EOL><DEDENT><DEDENT>if not closed:<EOL><INDENT>curve.append(points[-<NUM_LIT:1>])<EOL><DEDENT>curve = np.array(curve)<EOL>return curve[:, <NUM_LIT:0>], curve[:, <NUM_LIT:1>]<EOL>
|
Catmull-Rom spline
http://www.mvps.org/directx/articles/catmull/
|
f11379:c2:m14
|
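For reference, the `pnew` expression in `__generate_spline` is the standard Catmull-Rom interpolation: for consecutive control points $p_0, p_1, p_2, p_3$ and $t \in [0, 1)$,

$$p(t) = \tfrac{1}{2}\Big[\,2p_1 + (-p_0 + p_2)\,t + (2p_0 - 5p_1 + 4p_2 - p_3)\,t^2 + (-p_0 + 3p_1 - 3p_2 + p_3)\,t^3\Big]$$

sampled at `steps` values of $t$ per segment, which is what the tokenized body above computes.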
def fit(self, bbox, max_zoom=MAX_ZOOM, force_zoom=None):
|
BUFFER_FACTOR = <NUM_LIT><EOL>if force_zoom is not None:<EOL><INDENT>self.zoom = force_zoom<EOL><DEDENT>else:<EOL><INDENT>for zoom in range(max_zoom, MIN_ZOOM-<NUM_LIT:1>, -<NUM_LIT:1>):<EOL><INDENT>self.zoom = zoom<EOL>left, top = self.lonlat_to_screen([bbox.west], [bbox.north])<EOL>right, bottom = self.lonlat_to_screen([bbox.east], [bbox.south])<EOL>if (top - bottom < SCREEN_H*BUFFER_FACTOR) and (right - left < SCREEN_W*BUFFER_FACTOR):<EOL><INDENT>break<EOL><DEDENT><DEDENT><DEDENT>west_tile, north_tile = self.deg2num(bbox.north, bbox.west, self.zoom)<EOL>east_tile, south_tile = self.deg2num(bbox.south, bbox.east, self.zoom)<EOL>self.xtile = west_tile - self.tiles_horizontally/<NUM_LIT> + (east_tile - west_tile)/<NUM_LIT:2><EOL>self.ytile = north_tile - self.tiles_vertically/<NUM_LIT> + (south_tile - north_tile)/<NUM_LIT:2><EOL>self.calculate_viewport_size()<EOL>
|
Fits the projector to a BoundingBox
:param bbox: BoundingBox
:param max_zoom: max zoom allowed
:param force_zoom: force this specific zoom value even if the whole bbox does not completely fit
|
f11379:c3:m2
|
def lonlat_to_screen(self, lon, lat):
|
if type(lon) == list:<EOL><INDENT>lon = np.array(lon)<EOL><DEDENT>if type(lat) == list:<EOL><INDENT>lat = np.array(lat)<EOL><DEDENT>lat_rad = np.radians(lat)<EOL>n = <NUM_LIT> ** self.zoom<EOL>xtile = (lon + <NUM_LIT>) / <NUM_LIT> * n<EOL>ytile = (<NUM_LIT:1.0> - np.log(np.tan(lat_rad) + (<NUM_LIT:1> / np.cos(lat_rad))) / math.pi) / <NUM_LIT> * n<EOL>x = (xtile * TILE_SIZE).astype(int)<EOL>y = (SCREEN_H - ytile * TILE_SIZE).astype(int)<EOL>return x, y<EOL>
|
Projects geodesic coordinates to screen
:param lon: longitude
:param lat: latitude
:return: x,y screen coordinates
|
f11379:c3:m10
|
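`lonlat_to_screen` applies the standard Web Mercator (slippy-map) tile formula: with $n = 2^{\text{zoom}}$ and latitude $\varphi$ in radians,

$$x_{\text{tile}} = \frac{\text{lon} + 180}{360}\,n, \qquad y_{\text{tile}} = \frac{1 - \ln\big(\tan\varphi + \sec\varphi\big)/\pi}{2}\,n$$

Pixel coordinates then follow by multiplying by the tile size and flipping $y$ against the screen height, as the body above does with its elided constants.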
def screen_to_latlon(self, x, y):
|
xtile = <NUM_LIT:1.> * x / TILE_SIZE + self.xtile<EOL>ytile = <NUM_LIT:1.> * y / TILE_SIZE + self.ytile<EOL>return self.num2deg(xtile, ytile, self.zoom)<EOL>
|
Return the latitude and longitude corresponding to a screen point
:param x: screen x
:param y: screen y
:return: latitude and longitude at x,y
|
f11379:c3:m11
|
def haversine(lon1, lat1, lon2, lat2):
|
lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])<EOL>dlon = lon2 - lon1<EOL>dlat = lat2 - lat1<EOL>a = sin(dlat/<NUM_LIT:2>)**<NUM_LIT:2> + cos(lat1) * cos(lat2) * sin(dlon/<NUM_LIT:2>)**<NUM_LIT:2><EOL>c = <NUM_LIT:2> * asin(sqrt(a))<EOL>m = <NUM_LIT> * c<EOL>return m<EOL>
|
Distance between geodesic coordinates http://www.movable-type.co.uk/scripts/latlong.html
:param lon1: point 1 longitude
:param lat1: point 1 latitude
:param lon2: point 2 longitude
:param lat2: point 2 latitude
:return: distance in meters between points 1 and 2
|
f11380:m0
|
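A runnable sketch of the haversine distance implemented above; the Earth-radius constant is elided as `<NUM_LIT>` in the body, so a mean radius of 6 371 000 m is assumed here:

```python
from math import radians, sin, cos, asin, sqrt

def haversine(lon1, lat1, lon2, lat2, radius_m=6_371_000):
    """Great-circle distance in meters; radius_m is an assumed mean Earth radius."""
    lon1, lat1, lon2, lat2 = map(radians, (lon1, lat1, lon2, lat2))
    dlon, dlat = lon2 - lon1, lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    return 2 * radius_m * asin(sqrt(a))

# Roughly 880 km between Paris (2.35 E, 48.86 N) and Berlin (13.40 E, 52.52 N)
print(haversine(2.35, 48.86, 13.40, 52.52))
```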
def read_csv(fname):
|
values = defaultdict(list)<EOL>with open(fname) as f:<EOL><INDENT>reader = csv.DictReader(f)<EOL>for row in reader:<EOL><INDENT>for (k,v) in row.items():<EOL><INDENT>values[k].append(v)<EOL><DEDENT><DEDENT><DEDENT>npvalues = {k: np.array(values[k]) for k in values.keys()}<EOL>for k in npvalues.keys():<EOL><INDENT>for datatype in [np.int, np.float]:<EOL><INDENT>try:<EOL><INDENT>npvalues[k][:<NUM_LIT:1>].astype(datatype)<EOL>npvalues[k] = npvalues[k].astype(datatype)<EOL>break<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>dao = DataAccessObject(npvalues)<EOL>return dao<EOL>
|
Read a csv file into a DataAccessObject
:param fname: filename
|
f11380:m1
|
def epoch_to_str(epoch, fmt='<STR_LIT>'):
|
return datetime.fromtimestamp(epoch).strftime(fmt)<EOL>
|
Convert a Unix timestamp into a date string
:param epoch: unix timestamp
:param fmt: date format
:return: formatted date from timestamp
|
f11380:m2
|
def __init__(self, dict_or_df):
|
if type(dict_or_df) == dict:<EOL><INDENT>self.dict = dict_or_df<EOL><DEDENT>else:<EOL><INDENT>from pandas import DataFrame<EOL>if type(dict_or_df) == DataFrame:<EOL><INDENT>self.dict = {col: dict_or_df[col].values for col in dict_or_df.columns}<EOL><DEDENT>else:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT><DEDENT>
|
Create a DataAccessObject either from a dictionary or a pandas.DataFrame
|
f11380:c0:m0
|
@staticmethod<EOL><INDENT>def from_dataframe(df):<DEDENT>
|
import warnings<EOL>warnings.warn('<STR_LIT>', DeprecationWarning)<EOL>return DataAccessObject(df)<EOL>
|
Loads data from a pandas DataFrame
:param df: dataframe
:return: a DataAccessObject
|
f11380:c0:m1
|
def rename(self, mapping):
|
for old_key, new_key in mapping:<EOL><INDENT>self.dict[new_key] = self.dict[old_key]<EOL>del self.dict[old_key]<EOL><DEDENT>
|
Rename fields
:param mapping: a dict in the format {'oldkey1': 'newkey1', ...}
|
f11380:c0:m5
|
def where(self, mask):
|
assert len(mask) == len(self)<EOL>return DataAccessObject({k: self.dict[k][mask] for k in self.dict})<EOL>
|
:param mask: boolean mask
:return: a DataAccessObject with a subset of rows matching mask
|
f11380:c0:m6
|
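A short sketch of row filtering with `where`, assuming the dict constructor documented above and an import path of `geoplotlib.utils` (the column names are illustrative):

```python
import numpy as np
from geoplotlib.utils import DataAccessObject  # assumed import path

lat = np.array([48.8, 52.5, 40.7])
lon = np.array([2.3, 13.4, -74.0])
dao = DataAccessObject({'lat': lat, 'lon': lon})

# where() takes a boolean mask of the same length and returns a new
# DataAccessObject holding only the matching rows.
northern = dao.where(lat > 45)
print(northern.keys())
```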
def head(self, n):
|
return DataAccessObject({k: self.dict[k][:n] for k in self.dict})<EOL>
|
Return a DataAccessObject containing the first n rows
:param n: number of rows
:return: DataAccessObject
|
f11380:c0:m8
|
def keys(self):
|
return self.dict.keys()<EOL>
|
:return: the keys (field names)
|
f11380:c0:m9
|
def values(self):
|
return self.dict.values()<EOL>
|
:return: the values (field values)
|
f11380:c0:m10
|
def __init__(self, north, west, south, east):
|
self.north = north<EOL>self.west = west<EOL>self.south = south<EOL>self.east = east<EOL>
|
Represent a map boundingbox
:param north: northmost latitude
:param west: westmost longitude
:param south: southmost latitude
:param east: eastmost longitude
:return:
|
f11380:c1:m0
|
@staticmethod<EOL><INDENT>def from_points(lons, lats):<DEDENT>
|
north, west = max(lats), min(lons)<EOL>south, east = min(lats), max(lons)<EOL>return BoundingBox(north=north, west=west, south=south, east=east)<EOL>
|
Compute the BoundingBox from a set of latitudes and longitudes
:param lons: longitudes
:param lats: latitudes
:return: BoundingBox
|
f11380:c1:m1
|
@staticmethod<EOL><INDENT>def from_bboxes(bboxes):<DEDENT>
|
north = max([b.north for b in bboxes])<EOL>south = min([b.south for b in bboxes])<EOL>west = min([b.west for b in bboxes])<EOL>east = max([b.east for b in bboxes])<EOL>return BoundingBox(north=north, west=west, south=south, east=east)<EOL>
|
Compute a BoundingBox enclosing all specified bboxes
:param bboxes: a list of BoundingBoxes
:return: BoundingBox
|
f11380:c1:m2
|
def create_set_cmap(values, cmap_name, alpha=<NUM_LIT:255>):
|
unique_values = list(set(values))<EOL>shuffle(unique_values)<EOL>from pylab import get_cmap<EOL>cmap = get_cmap(cmap_name)<EOL>d = {}<EOL>for i in range(len(unique_values)):<EOL><INDENT>d[unique_values[i]] = _convert_color_format(cmap(<NUM_LIT:1.>*i/len(unique_values)), alpha)<EOL><DEDENT>return d<EOL>
|
return a dict of colors corresponding to the unique values
:param values: values to be mapped
:param cmap_name: colormap name
:param alpha: color alpha
:return: dict of colors corresponding to the unique values
|
f11381:m1
|
def colorbrewer(values, alpha=<NUM_LIT:255>):
|
basecolors = [<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT:255>, <NUM_LIT>, <NUM_LIT:0>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>],<EOL>[<NUM_LIT:255>, <NUM_LIT:255>, <NUM_LIT>],<EOL>[<NUM_LIT>, <NUM_LIT>, <NUM_LIT>]<EOL>]<EOL>unique_values = list(set(values))<EOL>return {k: basecolors[i % len(basecolors)] + [alpha] for i, k in enumerate(unique_values)}<EOL>
|
Return a dict of colors for the unique values.
Colors are adapted from Harrower, Mark, and Cynthia A. Brewer.
"ColorBrewer. org: an online tool for selecting colour schemes for maps."
The Cartographic Journal 40.1 (2003): 27-37.
:param values: values
:param alpha: color alpha
:return: dict of colors for the unique values.
|
f11381:m2
|
def __init__(self, cmap_name, alpha=<NUM_LIT:255>, levels=<NUM_LIT:10>):
|
from pylab import get_cmap<EOL>self.cmap = get_cmap(cmap_name)<EOL>self.alpha = alpha<EOL>self.levels = levels<EOL>self.mapping = {}<EOL>
|
Converts continuous values into colors using matplotlib colorscales
:param cmap_name: colormap name
:param alpha: color alpha
:param levels: discretize the colorscale into levels
|
f11381:c0:m0
|
def to_color(self, value, maxvalue, scale, minvalue=<NUM_LIT:0.0>):
|
if scale == '<STR_LIT>':<EOL><INDENT>if minvalue >= maxvalue:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>value = <NUM_LIT:1.>*(value-minvalue) / (maxvalue-minvalue)<EOL><DEDENT><DEDENT>elif scale == '<STR_LIT>':<EOL><INDENT>if value < <NUM_LIT:1> or maxvalue <= <NUM_LIT:1>:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>value = math.log(value) / math.log(maxvalue)<EOL><DEDENT><DEDENT>elif scale == '<STR_LIT>':<EOL><INDENT>if value < <NUM_LIT:0> or maxvalue <= <NUM_LIT:0>:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>value = math.sqrt(value) / math.sqrt(maxvalue)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>raise Exception('<STR_LIT>')<EOL><DEDENT>if value < <NUM_LIT:0>:<EOL><INDENT>value = <NUM_LIT:0><EOL><DEDENT>elif value > <NUM_LIT:1>:<EOL><INDENT>value = <NUM_LIT:1><EOL><DEDENT>value = int(<NUM_LIT:1.>*self.levels*value)*<NUM_LIT:1.>/(self.levels-<NUM_LIT:1>)<EOL>if value not in self.mapping:<EOL><INDENT>self.mapping[value] = _convert_color_format(self.cmap(value), self.alpha)<EOL><DEDENT>return self.mapping[value]<EOL>
|
convert continuous values into colors using matplotlib colorscales
:param value: value to be converted
:param maxvalue: max value in the colorscale
:param scale: lin, log, sqrt
:param minvalue: minimum of the input values in linear scale (default is 0)
:return: the color corresponding to the value
|
f11381:c0:m1
|
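The three scalings accepted by `to_color` normalize a value $v$ into $[0, 1]$ before it is clipped, discretized into `levels` steps, and looked up in the matplotlib colormap:

$$\text{lin: } \frac{v - v_{\min}}{v_{\max} - v_{\min}}, \qquad \text{log: } \frac{\ln v}{\ln v_{\max}}, \qquad \text{sqrt: } \frac{\sqrt{v}}{\sqrt{v_{\max}}}$$

which is what the branches of the tokenized body above compute.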
def depth_of(parts: Sequence[str]) -> int:
|
return len(parts) - <NUM_LIT:1><EOL>
|
Calculate the depth of URL parts
:param parts: A list of URL parts
:return: Depth of the list
:private:
|
f11384:m0
|
def normalize_url(url: str) -> str:
|
if url.startswith('<STR_LIT:/>'):<EOL><INDENT>url = url[<NUM_LIT:1>:]<EOL><DEDENT>if url.endswith('<STR_LIT:/>'):<EOL><INDENT>url = url[:-<NUM_LIT:1>]<EOL><DEDENT>return url<EOL>
|
Remove leading and trailing slashes from a URL
:param url: URL
:return: URL with no leading and trailing slashes
:private:
|
f11384:m1
|
def _unwrap(variable_parts: VariablePartsType):
|
curr_parts = variable_parts<EOL>var_any = []<EOL>while curr_parts:<EOL><INDENT>curr_parts, (var_type, part) = curr_parts<EOL>if var_type == Routes._VAR_ANY_NODE:<EOL><INDENT>var_any.append(part)<EOL>continue<EOL><DEDENT>if var_type == Routes._VAR_ANY_BREAK:<EOL><INDENT>if var_any:<EOL><INDENT>yield tuple(reversed(var_any))<EOL>var_any.clear()<EOL><DEDENT>var_any.append(part)<EOL>continue<EOL><DEDENT>if var_any:<EOL><INDENT>yield tuple(reversed(var_any))<EOL>var_any.clear()<EOL>yield part<EOL>continue<EOL><DEDENT>yield part<EOL><DEDENT>if var_any:<EOL><INDENT>yield tuple(reversed(var_any))<EOL><DEDENT>
|
Yield URL parts. The given parts are usually in reverse order.
|
f11384:m2
|
def make_params(<EOL>key_parts: Sequence[str],<EOL>variable_parts: VariablePartsType) -> Dict[str, Union[str, Tuple[str]]]:
|
<EOL>return dict(zip(reversed(key_parts), _unwrap(variable_parts)))<EOL>
|
Map keys to variables. This maps\
URL-pattern variables to\
their related URL parts
:param key_parts: A list of URL parts
:param variable_parts: A linked-list\
(ala nested tuples) of URL parts
:return: The param dict with the values\
assigned to the keys
:private:
|
f11384:m3
|
def __init__(self, max_depth: int=<NUM_LIT>) -> None:
|
self._max_depth_custom = max_depth<EOL>self._routes = {}<EOL>self._max_depth = <NUM_LIT:0><EOL>
|
:ivar _routes: \
Contain a graph with the parts of\
each URL pattern. This is referred as\
"partial route" later in the docs.
:vartype _routes: dict
:ivar _max_depth: Depth of the deepest\
registered pattern
:vartype _max_depth: int
:private-vars:
|
f11384:c1:m0
|
def _deconstruct_url(self, url: str) -> List[str]:
|
parts = url.split('<STR_LIT:/>', self._max_depth + <NUM_LIT:1>)<EOL>if depth_of(parts) > self._max_depth:<EOL><INDENT>raise RouteError('<STR_LIT>')<EOL><DEDENT>return parts<EOL>
|
Split a regular URL into parts
:param url: A normalized URL
:return: Parts of the URL
:raises kua.routes.RouteError: \
If the depth of the URL exceeds\
the max depth of the deepest\
registered pattern
:private:
|
f11384:c1:m1
|
def _match(self, parts: Sequence[str]) -> RouteResolved:
|
route_match = None <EOL>route_variable_parts = tuple() <EOL>to_visit = [(self._routes, tuple(), <NUM_LIT:0>)] <EOL>while to_visit:<EOL><INDENT>curr, curr_variable_parts, depth = to_visit.pop()<EOL>try:<EOL><INDENT>part = parts[depth]<EOL><DEDENT>except IndexError:<EOL><INDENT>if self._ROUTE_NODE in curr:<EOL><INDENT>route_match = curr[self._ROUTE_NODE]<EOL>route_variable_parts = curr_variable_parts<EOL>break<EOL><DEDENT>else:<EOL><INDENT>continue<EOL><DEDENT><DEDENT>if self._VAR_ANY_NODE in curr:<EOL><INDENT>to_visit.append((<EOL>{self._VAR_ANY_NODE: curr[self._VAR_ANY_NODE]},<EOL>(curr_variable_parts,<EOL>(self._VAR_ANY_NODE, part)),<EOL>depth + <NUM_LIT:1>))<EOL>to_visit.append((<EOL>curr[self._VAR_ANY_NODE],<EOL>(curr_variable_parts,<EOL>(self._VAR_ANY_BREAK, part)),<EOL>depth + <NUM_LIT:1>))<EOL><DEDENT>if self._VAR_NODE in curr:<EOL><INDENT>to_visit.append((<EOL>curr[self._VAR_NODE],<EOL>(curr_variable_parts,<EOL>(self._VAR_NODE, part)),<EOL>depth + <NUM_LIT:1>))<EOL><DEDENT>if part in curr:<EOL><INDENT>to_visit.append((<EOL>curr[part],<EOL>curr_variable_parts,<EOL>depth + <NUM_LIT:1>))<EOL><DEDENT><DEDENT>if not route_match:<EOL><INDENT>raise RouteError('<STR_LIT>')<EOL><DEDENT>return RouteResolved(<EOL>params=make_params(<EOL>key_parts=route_match.key_parts,<EOL>variable_parts=route_variable_parts),<EOL>anything=route_match.anything)<EOL>
|
Match URL parts to a registered pattern.
This function is basically where all\
the CPU-heavy work is done.
:param parts: URL parts
:return: Matched route
:raises kua.routes.RouteError: If there is no match
:private:
|
f11384:c1:m2
|
def match(self, url: str) -> RouteResolved:
|
url = normalize_url(url)<EOL>parts = self._deconstruct_url(url)<EOL>return self._match(parts)<EOL>
|
Match a URL to a registered pattern.
:param url: URL
:return: Matched route
:raises kua.RouteError: If there is no match
|
f11384:c1:m3
|
def add(self, url: str, anything: Any) -> None:
|
url = normalize_url(url)<EOL>parts = url.split('<STR_LIT:/>')<EOL>curr_partial_routes = self._routes<EOL>curr_key_parts = []<EOL>for part in parts:<EOL><INDENT>if part.startswith('<STR_LIT>'):<EOL><INDENT>curr_key_parts.append(part[<NUM_LIT:2>:])<EOL>part = self._VAR_ANY_NODE<EOL>self._max_depth = self._max_depth_custom<EOL><DEDENT>elif part.startswith('<STR_LIT::>'):<EOL><INDENT>curr_key_parts.append(part[<NUM_LIT:1>:])<EOL>part = self._VAR_NODE<EOL><DEDENT>curr_partial_routes = (curr_partial_routes<EOL>.setdefault(part, {}))<EOL><DEDENT>curr_partial_routes[self._ROUTE_NODE] = _Route(<EOL>key_parts=curr_key_parts,<EOL>anything=anything)<EOL>self._max_depth = max(self._max_depth, depth_of(parts))<EOL>
|
Register a URL pattern into\
the routes for later matching.
It's possible to attach any kind of\
object to the pattern for later\
retrieval. A dict with methods and callbacks,\
for example. Anything really.
Registration order does not matter.\
Adding a URL first or last makes no difference.
:param url: URL
:param anything: Literally anything.
|
f11384:c1:m4
|
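A hedged usage sketch of the router defined in these rows: `add` registers a pattern whose `:name` parts become variables (per its body above), and `match` returns a `RouteResolved` carrying the captured `params` plus whatever object was attached. The top-level import path is an assumption:

```python
from kua import Routes  # assumed import path

routes = Routes()
routes.add('users/:id/posts/:post_id', {'handler': 'show_post'})  # attach any object

route = routes.match('/users/42/posts/7')
print(route.params)    # {'id': '42', 'post_id': '7'}
print(route.anything)  # {'handler': 'show_post'}
```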
def _unquote_c_string(s):
|
<EOL>def decode_match(match):<EOL><INDENT>return utf8_bytes_string(<EOL>codecs.decode(match.group(<NUM_LIT:0>), '<STR_LIT>')<EOL>)<EOL><DEDENT>if sys.version_info[<NUM_LIT:0>] >= <NUM_LIT:3> and isinstance(s, bytes):<EOL><INDENT>return ESCAPE_SEQUENCE_BYTES_RE.sub(decode_match, s)<EOL><DEDENT>else:<EOL><INDENT>return ESCAPE_SEQUENCE_RE.sub(decode_match, s)<EOL><DEDENT>
|
replace C-style escape sequences (\n, \", etc.) with real chars.
|
f11391:m0
|
def __init__(self, input_stream):
|
self.input = input_stream<EOL>self.lineno = <NUM_LIT:0><EOL>self._buffer = []<EOL>
|
A Parser that keeps track of line numbers.
:param input_stream: the file-like object to read from
|
f11391:c0:m0
|
def abort(self, exception, *args):
|
raise exception(self.lineno, *args)<EOL>
|
Raise an exception providing line number information.
|
f11391:c0:m1
|
def readline(self):
|
self.lineno += <NUM_LIT:1><EOL>if self._buffer:<EOL><INDENT>return self._buffer.pop()<EOL><DEDENT>else:<EOL><INDENT>return self.input.readline()<EOL><DEDENT>
|
Get the next line including the newline or '' on EOF.
|
f11391:c0:m2
|
def next_line(self):
|
line = self.readline()<EOL>if line:<EOL><INDENT>return line[:-<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>return None<EOL><DEDENT>
|
Get the next line without the newline or None on EOF.
|
f11391:c0:m3
|
def push_line(self, line):
|
self.lineno -= <NUM_LIT:1><EOL>self._buffer.append(line + b'<STR_LIT:\n>')<EOL>
|
Push line back onto the line buffer.
:param line: the line with no trailing newline
|
f11391:c0:m4
|
def read_bytes(self, count):
|
result = self.input.read(count)<EOL>found = len(result)<EOL>self.lineno += result.count(b'<STR_LIT:\n>')<EOL>if found != count:<EOL><INDENT>self.abort(errors.MissingBytes, count, found)<EOL><DEDENT>return result<EOL>
|
Read a given number of bytes from the input stream.
Throws MissingBytes if the bytes are not found.
Note: This method does not read from the line buffer.
:return: a string
|
f11391:c0:m5
|
def read_until(self, terminator):
|
lines = []<EOL>term = terminator + b'<STR_LIT:\n>'<EOL>while True:<EOL><INDENT>line = self.input.readline()<EOL>if line == term:<EOL><INDENT>break<EOL><DEDENT>else:<EOL><INDENT>lines.append(line)<EOL><DEDENT><DEDENT>return b'<STR_LIT>'.join(lines)<EOL>
|
Read the input stream until the terminator is found.
Throws MissingTerminator if the terminator is not found.
Note: This method does not read from the line buffer.
:return: the bytes read up to but excluding the terminator.
|
f11391:c0:m6
|
def __init__(self, input_stream, verbose=False, output=sys.stdout,<EOL>user_mapper=None, strict=True):
|
LineBasedParser.__init__(self, input_stream)<EOL>self.verbose = verbose<EOL>self.output = output<EOL>self.user_mapper = user_mapper<EOL>self.strict = strict<EOL>self.date_parser = None<EOL>self.features = {}<EOL>
|
A Parser of import commands.
:param input_stream: the file-like object to read from
:param verbose: display extra information or not
:param output: the file-like object to write messages to (YAGNI?)
:param user_mapper: if not None, the UserMapper used to adjust
user-ids for authors, committers and taggers.
:param strict: Raise errors on strictly invalid data
|
f11391:c1:m0
|
def iter_commands(self):
|
while True:<EOL><INDENT>line = self.next_line()<EOL>if line is None:<EOL><INDENT>if b'<STR_LIT>' in self.features:<EOL><INDENT>raise errors.PrematureEndOfStream(self.lineno)<EOL><DEDENT>break<EOL><DEDENT>elif len(line) == <NUM_LIT:0> or line.startswith(b'<STR_LIT:#>'):<EOL><INDENT>continue<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_commit(line[len(b'<STR_LIT>'):])<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_blob()<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>break<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield commands.ProgressCommand(line[len(b'<STR_LIT>'):])<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_reset(line[len(b'<STR_LIT>'):])<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_tag(line[len(b'<STR_LIT>'):])<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield commands.CheckpointCommand()<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_feature(line[len(b'<STR_LIT>'):])<EOL><DEDENT>else:<EOL><INDENT>self.abort(errors.InvalidCommand, line)<EOL><DEDENT><DEDENT>
|
Iterator returning ImportCommand objects.
|
f11391:c1:m2
|
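A hedged sketch of driving the command parser above over a minimal git fast-import fragment. The concrete class name and module are assumptions (these rows only show its methods); the stream below declares one blob with a 6-byte payload:

```python
import io
from fastimport.parser import ImportParser  # assumed import path for the parser shown above

stream = io.BytesIO(b"blob\nmark :1\ndata 6\nhello\n\n")

parser = ImportParser(stream)
for cmd in parser.iter_commands():
    print(cmd)  # expected: a single blob command carrying the mark and payload b'hello\n'
```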
def iter_file_commands(self):
|
while True:<EOL><INDENT>line = self.next_line()<EOL>if line is None:<EOL><INDENT>break<EOL><DEDENT>elif len(line) == <NUM_LIT:0> or line.startswith(b'<STR_LIT:#>'):<EOL><INDENT>continue<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield self._parse_file_modify(line[<NUM_LIT:2>:])<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>path = self._path(line[<NUM_LIT:2>:])<EOL>yield commands.FileDeleteCommand(path)<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>old, new = self._path_pair(line[<NUM_LIT:2>:])<EOL>yield commands.FileRenameCommand(old, new)<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>src, dest = self._path_pair(line[<NUM_LIT:2>:])<EOL>yield commands.FileCopyCommand(src, dest)<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>yield commands.FileDeleteAllCommand()<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>break<EOL><DEDENT><DEDENT>
|
Iterator returning FileCommand objects.
If an invalid file command is found, the line is silently
pushed back and iteration ends.
|
f11391:c1:m3
|
def _parse_blob(self):
|
lineno = self.lineno<EOL>mark = self._get_mark_if_any()<EOL>data = self._get_data(b'<STR_LIT>')<EOL>return commands.BlobCommand(mark, data, lineno)<EOL>
|
Parse a blob command.
|
f11391:c1:m4
|
def _parse_commit(self, ref):
|
lineno = self.lineno<EOL>mark = self._get_mark_if_any()<EOL>author = self._get_user_info(b'<STR_LIT>', b'<STR_LIT>', False)<EOL>more_authors = []<EOL>while True:<EOL><INDENT>another_author = self._get_user_info(b'<STR_LIT>', b'<STR_LIT>', False)<EOL>if another_author is not None:<EOL><INDENT>more_authors.append(another_author)<EOL><DEDENT>else:<EOL><INDENT>break<EOL><DEDENT><DEDENT>committer = self._get_user_info(b'<STR_LIT>', b'<STR_LIT>')<EOL>message = self._get_data(b'<STR_LIT>', b'<STR_LIT:message>')<EOL>from_ = self._get_from()<EOL>merges = []<EOL>while True:<EOL><INDENT>merge = self._get_merge()<EOL>if merge is not None:<EOL><INDENT>these_merges = merge.split(b'<STR_LIT:U+0020>')<EOL>merges.extend(these_merges)<EOL><DEDENT>else:<EOL><INDENT>break<EOL><DEDENT><DEDENT>properties = {}<EOL>while True:<EOL><INDENT>name_value = self._get_property()<EOL>if name_value is not None:<EOL><INDENT>name, value = name_value<EOL>properties[name] = value<EOL><DEDENT>else:<EOL><INDENT>break<EOL><DEDENT><DEDENT>return commands.CommitCommand(ref, mark, author, committer, message,<EOL>from_, merges, list(self.iter_file_commands()), lineno=lineno,<EOL>more_authors=more_authors, properties=properties)<EOL>
|
Parse a commit command.
|
f11391:c1:m5
|
def _parse_feature(self, info):
|
parts = info.split(b'<STR_LIT:=>', <NUM_LIT:1>)<EOL>name = parts[<NUM_LIT:0>]<EOL>if len(parts) > <NUM_LIT:1>:<EOL><INDENT>value = self._path(parts[<NUM_LIT:1>])<EOL><DEDENT>else:<EOL><INDENT>value = None<EOL><DEDENT>self.features[name] = value<EOL>return commands.FeatureCommand(name, value, lineno=self.lineno)<EOL>
|
Parse a feature command.
|
f11391:c1:m6
|
def _parse_file_modify(self, info):
|
params = info.split(b'<STR_LIT:U+0020>', <NUM_LIT:2>)<EOL>path = self._path(params[<NUM_LIT:2>])<EOL>mode = self._mode(params[<NUM_LIT:0>])<EOL>if params[<NUM_LIT:1>] == b'<STR_LIT>':<EOL><INDENT>dataref = None<EOL>data = self._get_data(b'<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>dataref = params[<NUM_LIT:1>]<EOL>data = None<EOL><DEDENT>return commands.FileModifyCommand(path, mode, dataref,<EOL>data)<EOL>
|
Parse a filemodify command within a commit.
:param info: a string in the format "mode dataref path"
(where dataref might be the hard-coded literal 'inline').
|
f11391:c1:m7
|
def _parse_reset(self, ref):
|
from_ = self._get_from()<EOL>return commands.ResetCommand(ref, from_)<EOL>
|
Parse a reset command.
|
f11391:c1:m8
|
def _parse_tag(self, name):
|
from_ = self._get_from(b'<STR_LIT>')<EOL>tagger = self._get_user_info(b'<STR_LIT>', b'<STR_LIT>',<EOL>accept_just_who=True)<EOL>message = self._get_data(b'<STR_LIT>', b'<STR_LIT:message>')<EOL>return commands.TagCommand(name, from_, tagger, message)<EOL>
|
Parse a tag command.
|
f11391:c1:m9
|
def _get_mark_if_any(self):
|
line = self.next_line()<EOL>if line.startswith(b'<STR_LIT>'):<EOL><INDENT>return line[len(b'<STR_LIT>'):]<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>return None<EOL><DEDENT>
|
Parse a mark section.
|
f11391:c1:m10
|
def _get_from(self, required_for=None):
|
line = self.next_line()<EOL>if line is None:<EOL><INDENT>return None<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>return line[len(b'<STR_LIT>'):]<EOL><DEDENT>elif required_for:<EOL><INDENT>self.abort(errors.MissingSection, required_for, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>return None<EOL><DEDENT>
|
Parse a from section.
|
f11391:c1:m11
|
def _get_merge(self):
|
line = self.next_line()<EOL>if line is None:<EOL><INDENT>return None<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>return line[len(b'<STR_LIT>'):]<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>return None<EOL><DEDENT>
|
Parse a merge section.
|
f11391:c1:m12
|
def _get_property(self):
|
line = self.next_line()<EOL>if line is None:<EOL><INDENT>return None<EOL><DEDENT>elif line.startswith(b'<STR_LIT>'):<EOL><INDENT>return self._name_value(line[len(b'<STR_LIT>'):])<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>return None<EOL><DEDENT>
|
Parse a property section.
|
f11391:c1:m13
|
def _get_user_info(self, cmd, section, required=True,<EOL>accept_just_who=False):
|
line = self.next_line()<EOL>if line.startswith(section + b'<STR_LIT:U+0020>'):<EOL><INDENT>return self._who_when(line[len(section + b'<STR_LIT:U+0020>'):], cmd, section,<EOL>accept_just_who=accept_just_who)<EOL><DEDENT>elif required:<EOL><INDENT>self.abort(errors.MissingSection, cmd, section)<EOL><DEDENT>else:<EOL><INDENT>self.push_line(line)<EOL>return None<EOL><DEDENT>
|
Parse a user section.
|
f11391:c1:m14
|
def _get_data(self, required_for, section=b'<STR_LIT:data>'):
|
line = self.next_line()<EOL>if line.startswith(b'<STR_LIT>'):<EOL><INDENT>rest = line[len(b'<STR_LIT>'):]<EOL>if rest.startswith(b'<STR_LIT>'):<EOL><INDENT>return self.read_until(rest[<NUM_LIT:2>:])<EOL><DEDENT>else:<EOL><INDENT>size = int(rest)<EOL>read_bytes = self.read_bytes(size)<EOL>next_line = self.input.readline()<EOL>self.lineno += <NUM_LIT:1><EOL>if len(next_line) > <NUM_LIT:1> or next_line != b'<STR_LIT:\n>':<EOL><INDENT>self.push_line(next_line[:-<NUM_LIT:1>])<EOL><DEDENT>return read_bytes<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.abort(errors.MissingSection, required_for, section)<EOL><DEDENT>
|
Parse a data section.
|
f11391:c1:m15
|
def _who_when(self, s, cmd, section, accept_just_who=False):
|
match = _WHO_AND_WHEN_RE.search(s)<EOL>if match:<EOL><INDENT>datestr = match.group(<NUM_LIT:3>).lstrip()<EOL>if self.date_parser is None:<EOL><INDENT>if len(datestr.split(b'<STR_LIT:U+0020>')) == <NUM_LIT:2>:<EOL><INDENT>date_format = '<STR_LIT>'<EOL><DEDENT>elif datestr == b'<STR_LIT>':<EOL><INDENT>date_format = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>date_format = '<STR_LIT>'<EOL><DEDENT>self.date_parser = dates.DATE_PARSERS_BY_NAME[date_format]<EOL><DEDENT>try:<EOL><INDENT>when = self.date_parser(datestr, self.lineno)<EOL><DEDENT>except ValueError:<EOL><INDENT>print("<STR_LIT>" % (datestr,))<EOL>raise<EOL><DEDENT>name = match.group(<NUM_LIT:1>).rstrip()<EOL>email = match.group(<NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>match = _WHO_RE.search(s)<EOL>if accept_just_who and match:<EOL><INDENT>when = dates.DATE_PARSERS_BY_NAME['<STR_LIT>']('<STR_LIT>')<EOL>name = match.group(<NUM_LIT:1>)<EOL>email = match.group(<NUM_LIT:2>)<EOL><DEDENT>elif self.strict:<EOL><INDENT>self.abort(errors.BadFormat, cmd, section, s)<EOL><DEDENT>else:<EOL><INDENT>name = s<EOL>email = None<EOL>when = dates.DATE_PARSERS_BY_NAME['<STR_LIT>']('<STR_LIT>')<EOL><DEDENT><DEDENT>if len(name) > <NUM_LIT:0>:<EOL><INDENT>if name.endswith(b'<STR_LIT:U+0020>'):<EOL><INDENT>name = name[:-<NUM_LIT:1>]<EOL><DEDENT><DEDENT>if self.user_mapper:<EOL><INDENT>name, email = self.user_mapper.map_name_and_email(name, email)<EOL><DEDENT>return Authorship(name, email, when[<NUM_LIT:0>], when[<NUM_LIT:1>])<EOL>
|
Parse who and when information from a string.
:return: a tuple of (name,email,timestamp,timezone). name may be
the empty string if only an email address was given.
|
f11391:c1:m16
|