code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def install_package(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=False,
        timeout_sec=600,
        expected_running_tasks=0):
    """Install a package via the DC/OS library.

    :param package_name: name of the package
    :type package_name: str
    :param package_version: version of the package (defaults to latest)
    :type package_version: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param options_file: filename that has options to use and is JSON format
    :type options_file: str
    :param options_json: dict that has options to use and is JSON format
    :type options_json: dict
    :param wait_for_completion: whether or not to wait for the app's deployment to complete
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :param expected_running_tasks: number of service tasks to check for, or zero to disable
    :type expected_running_tasks: int
    :return: True if installation was successful, False otherwise
    :rtype: bool
    """
    start = time.time()
    # Resolve install options: an options file takes precedence over inline JSON.
    if options_file:
        options = _get_options(options_file)
    elif options_json:
        options = options_json
    else:
        options = {}
    package_manager = _get_package_manager()
    pkg = package_manager.get_package_version(package_name, package_version)
    if package_version is None:
        # Get the resolved version for logging below
        package_version = 'auto:{}'.format(pkg.version())
    if service_name is None:
        # Get the service name from the marathon template
        try:
            labels = pkg.marathon_json(options).get('labels')
            # FIX: guard against a marathon template with no labels at all
            # (labels is None) -- the bare `in` test raised TypeError.
            if labels and 'DCOS_SERVICE_NAME' in labels:
                service_name = labels['DCOS_SERVICE_NAME']
        except errors.DCOSException:
            # Best-effort lookup only; proceed with service_name unset.
            pass
    print('\n{}installing {} with service={} version={} options={}'.format(
        shakedown.cli.helpers.fchr('>>'), package_name, service_name, package_version, options))
    try:
        # Print pre-install notes to console log
        pre_install_notes = pkg.package_json().get('preInstallNotes')
        if pre_install_notes:
            print(pre_install_notes)
        package_manager.install_app(pkg, options, service_name)
        # Print post-install notes to console log
        post_install_notes = pkg.package_json().get('postInstallNotes')
        if post_install_notes:
            print(post_install_notes)
        # Optionally wait for the app's deployment to finish
        if wait_for_completion:
            print("\n{}waiting for {} deployment to complete...".format(
                shakedown.cli.helpers.fchr('>>'), service_name))
            if expected_running_tasks > 0 and service_name is not None:
                wait_for_service_tasks_running(service_name, expected_running_tasks, timeout_sec)
            app_id = pkg.marathon_json(options).get('id')
            shakedown.deployment_wait(timeout_sec, app_id)
            print('\n{}install completed after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
        else:
            print('\n{}install started after {}\n'.format(
                shakedown.cli.helpers.fchr('>>'), pretty_duration(time.time() - start)))
    except errors.DCOSException as e:
        print('\n{}{}'.format(shakedown.cli.helpers.fchr('>>'), e))
    # Install subcommands (if defined)
    if pkg.cli_definition():
        print("{}installing CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.install(pkg)
    return True
2.766806
2.748421
1.006689
def install_package_and_wait(
        package_name,
        package_version=None,
        service_name=None,
        options_file=None,
        options_json=None,
        wait_for_completion=True,
        timeout_sec=600,
        expected_running_tasks=0):
    """Install a package via the DC/OS library and wait for completion.

    Thin wrapper around :func:`install_package`; the only difference is
    that waiting for the deployment is enabled by default.
    """
    return install_package(
        package_name,
        package_version=package_version,
        service_name=service_name,
        options_file=options_file,
        options_json=options_json,
        wait_for_completion=wait_for_completion,
        timeout_sec=timeout_sec,
        expected_running_tasks=expected_running_tasks)
1.707147
1.915916
0.891035
def package_installed(package_name, service_name=None):
    """Check whether the package ``package_name`` is currently installed.

    :param package_name: package name
    :type package_name: str
    :param service_name: service_name
    :type service_name: str
    :return: True if installed, False otherwise
    :rtype: bool
    """
    manager = _get_package_manager()
    # Installed either as a running app or as a CLI subcommand.
    has_app = bool(manager.installed_apps(package_name, service_name))
    has_subcommand = any(
        subcmd.package_json()['name'] == package_name
        for subcmd in package.installed_subcommands())
    return has_app or has_subcommand
3.408625
4.032657
0.845255
def uninstall_package(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=False,
        timeout_sec=600):
    """Uninstall a package using the DC/OS library.

    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param all_instances: uninstall all instances of package
    :type all_instances: bool
    :param wait_for_completion: whether or not to wait for task completion before returning
    :type wait_for_completion: bool
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    manager = _get_package_manager()
    pkg = manager.get_package_version(package_name, None)
    try:
        if service_name is None:
            service_name = _get_service_name(package_name, pkg)
        print("{}uninstalling package '{}' with service name '{}'\n".format(
            shakedown.cli.helpers.fchr('>>'), package_name, service_name))
        manager.uninstall_app(package_name, all_instances, service_name)
        # Optionally wait for the service to unregister as a framework
        if wait_for_completion:
            wait_for_mesos_task_removal(service_name, timeout_sec=timeout_sec)
    except errors.DCOSException as e:
        print('\n{}{}'.format(shakedown.cli.helpers.fchr('>>'), e))
    # Uninstall subcommands (if defined)
    if pkg.cli_definition():
        print("{}uninstalling CLI commands for package '{}'".format(
            shakedown.cli.helpers.fchr('>>'), package_name))
        subcommand.uninstall(package_name)
    return True
4.038613
4.175226
0.96728
def uninstall_package_and_wait(
        package_name,
        service_name=None,
        all_instances=False,
        wait_for_completion=True,
        timeout_sec=600):
    """Uninstall a package via the DC/OS library and wait for completion.

    Thin wrapper around :func:`uninstall_package`; the only difference is
    that waiting for task removal is enabled by default.

    :return: True if uninstall was successful, False otherwise
    :rtype: bool
    """
    return uninstall_package(
        package_name,
        service_name=service_name,
        all_instances=all_instances,
        wait_for_completion=wait_for_completion,
        timeout_sec=timeout_sec)
2.1635
2.792039
0.774882
def uninstall_package_and_data(
        package_name,
        service_name=None,
        role=None,
        principal=None,
        zk_node=None,
        timeout_sec=600):
    """Uninstall a package via the DC/OS library, wait for completion,
    and delete any persistent data.

    :param package_name: name of the package
    :type package_name: str
    :param service_name: unique service name for the package
    :type service_name: str
    :param role: role to use when deleting data, or <service_name>-role if unset
    :type role: str, or None
    :param principal: principal to use when deleting data, or <service_name>-principal if unset
    :type principal: str, or None
    :param zk_node: zk node to delete, or dcos-service-<service_name> if unset
    :type zk_node: str, or None
    :param timeout_sec: number of seconds to wait for task completion
    :type timeout_sec: int
    """
    start = time.time()
    if service_name is None:
        # Resolve the service name from the package's marathon template.
        pkg = _get_package_manager().get_package_version(package_name, None)
        service_name = _get_service_name(package_name, pkg)
    print('\n{}uninstalling/deleting {}'.format(shakedown.cli.helpers.fchr('>>'), service_name))
    try:
        uninstall_package_and_wait(package_name, service_name=service_name, timeout_sec=timeout_sec)
    except (errors.DCOSException, ValueError) as e:
        # Best effort: the data cleanup below proceeds even when the
        # package uninstall itself failed.
        print('Got exception when uninstalling package, ' +
              'continuing with janitor anyway: {}'.format(e))
    data_start = time.time()
    # NOTE(review): service_name is always set by this point, so this guard
    # looks unreachable -- confirm whether it protects an older code path.
    if (not role or not principal or not zk_node) and service_name is None:
        raise DCOSException('service_name must be provided when data params are missing AND the package isn\'t installed')
    if not role:
        role = '{}-role'.format(service_name)
    if not zk_node:
        zk_node = 'dcos-service-{}'.format(service_name)
    delete_persistent_data(role, zk_node)
    finish = time.time()
    print('\n{}uninstall/delete done after pkg({}) + data({}) = total({})\n'.format(
        shakedown.cli.helpers.fchr('>>'),
        pretty_duration(data_start - start),
        pretty_duration(finish - data_start),
        pretty_duration(finish - start)))
4.051612
3.822497
1.059939
def add_package_repo(
        repo_name,
        repo_url,
        index=None,
        wait_for_package=None,
        expect_prev_version=None):
    """Add a repository to the list of package sources.

    :param repo_name: name of the repository to add
    :type repo_name: str
    :param repo_url: location of the repository to add
    :type repo_url: str
    :param index: index (precedence) for this repository
    :type index: int
    :param wait_for_package: the package whose version should change after the repo is added
    :type wait_for_package: str, or None
    :return: True if successful, False otherwise
    :rtype: bool
    """
    # NOTE(review): expect_prev_version is accepted but unused -- kept for
    # interface compatibility; confirm against callers.
    manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the version before the repo change so we can detect it moving.
        prev_version = manager.get_package_version(wait_for_package, None)
    if not manager.add_repo(repo_name, repo_url, index):
        return False
    if wait_for_package:
        try:
            spinner.time_wait(
                lambda: package_version_changed_predicate(manager, wait_for_package, prev_version))
        except TimeoutExpired:
            return False
    return True
3.268348
3.199299
1.021583
def remove_package_repo(repo_name, wait_for_package=None):
    """Remove a repository from the list of package sources.

    :param repo_name: name of the repository to remove
    :type repo_name: str
    :param wait_for_package: the package whose version should change after the repo is removed
    :type wait_for_package: str, or None
    :returns: True if successful, False otherwise
    :rtype: bool
    """
    manager = _get_package_manager()
    prev_version = None
    if wait_for_package:
        # Snapshot the version before the repo change so we can detect it moving.
        prev_version = manager.get_package_version(wait_for_package, None)
    if not manager.remove_repo(repo_name):
        return False
    if wait_for_package:
        try:
            spinner.time_wait(
                lambda: package_version_changed_predicate(manager, wait_for_package, prev_version))
        except TimeoutExpired:
            return False
    return True
3.440001
3.661289
0.93956
def get_package_versions(package_name):
    """Return the list of available versions of a given package.

    :param package_name: name of the package
    :type package_name: str
    """
    manager = _get_package_manager()
    return manager.get_package_version(package_name, None).package_versions()
4.715646
6.39449
0.737455
def find_coverage(self, zoom):
    """Return the bounding box (minx, miny, maxx, maxy) of an adjacent
    group of tiles at this zoom level."""
    # Find a group of adjacent available tiles at this zoom level
    rows = self._query('''SELECT tile_column, tile_row FROM tiles WHERE zoom_level=? ORDER BY tile_column, tile_row;''', (zoom,))
    t = rows.fetchone()
    # NOTE(review): assumes at least one tile exists at this zoom level;
    # fetchone() returning None would make this unpacking raise TypeError.
    xmin, ymin = t
    previous = t
    while t and t[0] - previous[0] <= 1:
        # adjacent, go on
        previous = t
        t = rows.fetchone()
    xmax, ymax = previous
    # Transform (xmin, ymin) (xmax, ymax) to pixels
    S = self.tilesize
    bottomleft = (xmin * S, (ymax + 1) * S)
    topright = ((xmax + 1) * S, ymin * S)
    # Convert center to (lon, lat)
    proj = GoogleProjection(S, [zoom])  # WGS84
    return proj.unproject_pixels(bottomleft, zoom) + proj.unproject_pixels(topright, zoom)
5.270222
4.725417
1.115293
def tile(self, z, x, y):
    """Download the specified tile from `tiles_url`."""
    logger.debug(_("Download tile %s") % ((z, x, y),))
    # Render each keyword in URL ({s}, {x}, {y}, {z}, {size} ... )
    size = self.tilesize
    # Round-robin over the configured subdomains.
    s = self.tiles_subdomains[(x + y) % len(self.tiles_subdomains)];
    try:
        # NOTE: format(**locals()) means the local variable names
        # (z, x, y, s, size) ARE the URL template keywords -- do not rename.
        url = self.tiles_url.format(**locals())
    except KeyError as e:
        raise DownloadError(_("Unknown keyword %s in URL") % e)
    logger.debug(_("Retrieve tile at %s") % url)
    r = DOWNLOAD_RETRIES
    sleeptime = 1
    while r > 0:
        try:
            request = requests.get(url, headers=self.headers)
            if request.status_code == 200:
                return request.content
            # Non-200 responses abort immediately (no retry).
            raise DownloadError(_("Status code : %s, url : %s") % (request.status_code, url))
        except requests.exceptions.ConnectionError as e:
            logger.debug(_("Download error, retry (%s left). (%s)") % (r, e))
            r -= 1
            time.sleep(sleeptime)
            # progressivly sleep longer to wait for this tile
            if (sleeptime <= 10) and (r % 2 == 0):
                sleeptime += 1  # increase wait
    raise DownloadError(_("Cannot download URL %s") % url)
4.382543
4.172803
1.050263
def tile(self, z, x, y):
    """Render the specified tile with Mapnik."""
    logger.debug(_("Render tile %s") % ((z, x, y),))
    projection = GoogleProjection(self.tilesize, [z])
    bbox = projection.tile_bbox((z, x, y))
    return self.render(bbox)
8.518836
8.118245
1.049345
def render(self, bbox, width=None, height=None):
    """Render the given bounding box to PNG bytes with Mapnik.

    :param bbox: bounding box to render
    :param width: image width in pixels (defaults to the tile size)
    :param height: image height in pixels (defaults to the tile size)
    :return: PNG-encoded image content
    :rtype: bytes
    """
    width = width or self.tilesize
    height = height or self.tilesize
    self._prepare_rendering(bbox, width=width, height=height)
    # Render image with default Agg renderer.
    # Mapnik can only save to a file path, so go through a temp file.
    tmpfile = NamedTemporaryFile(delete=False)
    tmpfile.close()
    try:
        im = mapnik.Image(width, height)
        mapnik.render(self._mapnik, im)
        im.save(tmpfile.name, 'png256')
        # FIX: the original left the read handle unclosed and leaked the
        # temp file whenever rendering raised; use `with` + try/finally.
        with open(tmpfile.name, 'rb') as rendered:
            return rendered.read()
    finally:
        os.unlink(tmpfile.name)
3.540555
3.254502
1.087895
def grid(self, z, x, y, fields, layer):
    """Render the specified UTFGrid tile with Mapnik."""
    logger.debug(_("Render grid %s") % ((z, x, y),))
    projection = GoogleProjection(self.tilesize, [z])
    bbox = projection.tile_bbox((z, x, y))
    return self.render_grid(bbox, fields, layer)
7.529789
6.942068
1.084661
def render_grid(self, bbox, grid_fields, layer, width=None, height=None):
    """Render a UTFGrid for ``bbox`` with Mapnik and return it as JSON."""
    width = width or self.tilesize
    height = height or self.tilesize
    self._prepare_rendering(bbox, width=width, height=height)
    utfgrid = mapnik.Grid(width, height)
    mapnik.render_layer(self._mapnik, utfgrid, layer=layer, fields=grid_fields)
    return json.dumps(utfgrid.encode())
3.783599
3.759785
1.006334
def tile_at(self, zoom, position):
    """Return the (z, x, y) tuple of the tile containing ``position`` at ``zoom``."""
    px, py = self.project_pixels(position, zoom)
    return (zoom, int(px / self.tilesize), int(py / self.tilesize))
4.300409
3.393616
1.267205
def project(self, lng_lat):
    """Project a WGS84 (lng, lat) pair in degrees to spherical-mercator meters."""
    (lng, lat) = lng_lat
    # Clamp latitude to the mercator-projectable range.
    lat = min(MAX_LATITUDE, max(-MAX_LATITUDE, lat))
    x = lng * DEG_TO_RAD
    y = log(tan((pi / 4) + (lat * DEG_TO_RAD / 2)))
    return (x * EARTH_RADIUS, y * EARTH_RADIUS)
2.850924
2.602073
1.095636
def string2rgba(cls, colorstring):
    """Convert '#RRGGBB' or '#RRGGBBAA' to an (R, G, B, A) tuple."""
    colorstring = colorstring.strip()
    if colorstring[0] == '#':
        colorstring = colorstring[1:]
    if len(colorstring) < 6:
        raise ValueError("input #%s is not in #RRGGBB format" % colorstring)
    channels = [colorstring[0:2], colorstring[2:4], colorstring[4:6]]
    # Default to fully opaque when no alpha component is given.
    channels.append(colorstring[6:8] if len(colorstring) > 6 else 'ff')
    return tuple(int(chunk, 16) for chunk in channels)
1.74557
1.655335
1.054512
def tileslist(self, bbox, zoomlevels):
    """Build the tiles list within the bottom-left/top-right bounding box
    (minx, miny, maxx, maxy) at the specified zoom levels.
    Return a list of tuples (z, x, y).
    """
    projection = GoogleProjection(self.tile_size, zoomlevels, self.tile_scheme)
    return projection.tileslist(bbox)
6.727597
8.15035
0.825437
def add_layer(self, tilemanager, opacity=1.0):
    """Add a layer to be blended (alpha-composite) on top of the tile.

    tilemanager -- a `TileManager` instance
    opacity -- transparency factor for compositing
    """
    assert has_pil, _("Cannot blend layers without python PIL")
    assert self.tile_size == tilemanager.tile_size, _("Cannot blend layers whose tile size differs")
    assert 0 <= opacity <= 1, _("Opacity should be between 0.0 (transparent) and 1.0 (opaque)")
    # Namespace the cache by the blended layer + opacity so results
    # from different blends never collide.
    cache_suffix = '%s%.1f' % (tilemanager.cache.basename, opacity)
    self.cache.basename += cache_suffix
    self._layers.append((tilemanager, opacity))
5.337922
5.072339
1.052359
def add_filter(self, filter_):
    """Add an image filter for tile post-processing."""
    assert has_pil, _("Cannot add filters without python PIL")
    # Filters change the output, so namespace the cache accordingly.
    self.cache.basename += filter_.basename
    self._filters.append(filter_)
21.698751
19.283007
1.125278
def tile(self, z_x_y):
    """Return the (binary) content of the tile and seed the cache."""
    (z, x, y) = z_x_y
    logger.debug(_("tile method called with %s") % ([z, x, y]))
    output = self.cache.read((z, x, y))
    if output is None:
        # Cache miss: obtain the tile from the underlying reader,
        # post-process it, then store the result.
        output = self.reader.tile(z, x, y)
        # Blend layers
        if len(self._layers) > 0:
            logger.debug(_("Will blend %s layer(s)") % len(self._layers))
            output = self._blend_layers(output, (z, x, y))
        # Apply filters
        for f in self._filters:
            image = f.process(self._tile_image(output))
            output = self._image_tile(image)
        # Save result to cache
        self.cache.save(output, (z, x, y))
        self.rendered += 1
    return output
3.507051
3.451056
1.016226
def grid(self, z_x_y):
    """Return the UTFGrid content for the given (z, x, y) tile."""
    z, x, y = z_x_y
    # sources.py -> MapnikRenderer -> grid
    return self.reader.grid(z, x, y, self.grid_fields, self.grid_layer)
10.17229
7.911353
1.285784
def _blend_layers(self, imagecontent, z_x_y):
    """Merge tiles of all layers into the specified tile and return the
    resulting (binary) image content."""
    (z, x, y) = z_x_y
    result = self._tile_image(imagecontent)
    # Paste each layer
    for (layer, opacity) in self._layers:
        try:
            # Prepare tile of overlay, if available
            overlay = self._tile_image(layer.tile((z, x, y)))
        except (IOError, DownloadError, ExtractionError) as e:
            # A missing overlay tile is not fatal; skip it.
            logger.warn(e)
            continue
        # Extract alpha mask
        overlay = overlay.convert("RGBA")
        r, g, b, a = overlay.split()
        overlay = Image.merge("RGB", (r, g, b))
        # Scale the overlay's own alpha by the configured opacity.
        a = ImageEnhance.Brightness(a).enhance(opacity)
        overlay.putalpha(a)
        mask = Image.merge("L", (a,))
        result.paste(overlay, (0, 0), mask)
    # Read result
    return self._image_tile(result)
4.048949
3.923917
1.031864
def _tile_image(self, data):
    """Return the tile binary content as an RGBA PIL image."""
    return Image.open(BytesIO(data)).convert('RGBA')
4.419976
3.192427
1.384519
def zoomlevels(self):
    """Return the list of covered zoom levels, in ascending order."""
    covered = {zoom for coverage in self._bboxes for zoom in coverage[1]}
    return sorted(covered)
5.155621
4.624485
1.114853
def run(self, force=False):
    """Build a MBTile file.

    force -- overwrite if MBTiles file already exists.
    """
    if os.path.exists(self.filepath):
        if force:
            logger.warn(_("%s already exists. Overwrite.") % self.filepath)
            os.remove(self.filepath)
        else:
            # Already built, do not do anything.
            logger.info(_("%s already exists. Nothing to do.") % self.filepath)
            return
    # Clean previous runs
    self._clean_gather()
    # If no coverage added, use bottom layer metadata
    if len(self._bboxes) == 0 and len(self._layers) > 0:
        bottomlayer = self._layers[0]
        metadata = bottomlayer.reader.metadata()
        if 'bounds' in metadata:
            logger.debug(_("Use bounds of bottom layer %s") % bottomlayer)
            bbox = map(float, metadata.get('bounds', '').split(','))
            zoomlevels = range(int(metadata.get('minzoom', 0)), int(metadata.get('maxzoom', 0)))
            self.add_coverage(bbox=bbox, zoomlevels=zoomlevels)
    # Compute list of tiles
    tileslist = set()
    for bbox, levels in self._bboxes:
        logger.debug(_("Compute list of tiles for bbox %s on zooms %s.") % (bbox, levels))
        bboxlist = self.tileslist(bbox, levels)
        logger.debug(_("Add %s tiles.") % len(bboxlist))
        tileslist = tileslist.union(bboxlist)
    logger.debug(_("%s tiles in total.") % len(tileslist))
    self.nbtiles = len(tileslist)
    if not self.nbtiles:
        raise EmptyCoverageError(_("No tiles are covered by bounding boxes : %s") % self._bboxes)
    logger.debug(_("%s tiles to be packaged.") % self.nbtiles)
    # Go through whole list of tiles and gather them in tmp_dir
    self.rendered = 0
    for (z, x, y) in tileslist:
        try:
            self._gather((z, x, y))
        except Exception as e:
            # Tolerate individual tile failures unless configured otherwise.
            logger.warn(e)
            if not self.ignore_errors:
                raise
    logger.debug(_("%s tiles were missing.") % self.rendered)
    # Some metadata
    middlezoom = self.zoomlevels[len(self.zoomlevels) // 2]
    lat = self.bounds[1] + (self.bounds[3] - self.bounds[1])/2
    lon = self.bounds[0] + (self.bounds[2] - self.bounds[0])/2
    metadata = {}
    metadata['name'] = str(uuid.uuid4())
    metadata['format'] = self._tile_extension[1:]
    metadata['minzoom'] = self.zoomlevels[0]
    metadata['maxzoom'] = self.zoomlevels[-1]
    metadata['bounds'] = '%s,%s,%s,%s' % tuple(self.bounds)
    metadata['center'] = '%s,%s,%s' % (lon, lat, middlezoom)
    #display informations from the grids on hover
    content_to_display = ''
    for field_name in self.grid_fields:
        content_to_display += "{{{ %s }}}<br>" % field_name
    metadata['template'] = '{{#__location__}}{{/__location__}} {{#__teaser__}} \
%s {{/__teaser__}}{{#__full__}}{{/__full__}}' % content_to_display
    metadatafile = os.path.join(self.tmp_dir, 'metadata.json')
    with open(metadatafile, 'w') as output:
        json.dump(metadata, output)
    # TODO: add UTF-Grid of last layer, if any
    # Package it!
    logger.info(_("Build MBTiles file '%s'.") % self.filepath)
    extension = self.tile_format.split("image/")[-1]
    disk_to_mbtiles(self.tmp_dir, self.filepath, format=extension, scheme=self.cache.scheme)
    try:
        os.remove("%s-journal" % self.filepath)  # created by mbutil
    except OSError as e:
        pass
    self._clean_gather()
3.719158
3.614155
1.029053
def grid_tiles(self, bbox, zoomlevel):
    """Return a grid of (x, y) tuples representing the juxtaposition
    of tiles on the specified ``bbox`` at the specified ``zoomlevel``.
    """
    tiles = self.tileslist(bbox, [zoomlevel])
    rows = {}
    for (z, x, y) in tiles:
        if not rows.get(y):
            rows[y] = []
        rows[y].append(x)
    # TMS numbers rows from the bottom, so flip the vertical order there.
    reverse = (self.tile_scheme == 'tms')
    return [[(x, y) for x in sorted(rows[y])] for y in sorted(rows, reverse=reverse)]
3.125441
3.034254
1.030053
def export_image(self, bbox, zoomlevel, imagepath):
    """Write to ``imagepath`` the tiles for the specified bounding box and zoomlevel."""
    assert has_pil, _("Cannot export image without python PIL")
    grid = self.grid_tiles(bbox, zoomlevel)
    width = len(grid[0]) * self.tile_size
    height = len(grid) * self.tile_size
    result = Image.new("RGBA", (width, height))
    for i, row in enumerate(grid):
        for j, (x, y) in enumerate(row):
            # Paste each tile at its pixel offset within the mosaic.
            offset = (j * self.tile_size, i * self.tile_size)
            img = self._tile_image(self.tile((zoomlevel, x, y)))
            result.paste(img, offset)
    logger.info(_("Save resulting image to '%s'") % imagepath)
    result.save(imagepath)
2.894582
2.887484
1.002458
scripts = [] for scriptRecord in gpos.ScriptList.ScriptRecord: scripts.append(scriptRecord.ScriptTag) if "DFLT" in scripts: scripts.remove("DFLT") scripts.insert(0, "DFLT") return sorted(scripts)
def _makeScriptOrder(gpos)
Run therough GPOS and make an alphabetically ordered list of scripts. If DFLT is in the list, move it to the front.
3.246136
2.634506
1.232161
def _gatherDataFromLookups(gpos, scriptOrder):
    """Gather kerning and classes from the applicable lookups
    and return them in script order."""
    lookupIndexes = _gatherLookupIndexes(gpos)
    # Each lookup is processed only once, credited to the first script
    # (in scriptOrder) that references it.
    seenLookups = set()
    kerningDictionaries = []
    leftClassDictionaries = []
    rightClassDictionaries = []
    for script in scriptOrder:
        kerning = []
        leftClasses = []
        rightClasses = []
        for lookupIndex in lookupIndexes[script]:
            if lookupIndex in seenLookups:
                continue
            seenLookups.add(lookupIndex)
            result = _gatherKerningForLookup(gpos, lookupIndex)
            if result is None:
                # Not a pair-positioning lookup; nothing to collect.
                continue
            k, lG, rG = result
            kerning.append(k)
            leftClasses.append(lG)
            rightClasses.append(rG)
        if kerning:
            # Only scripts that contributed kerning appear in the output.
            kerningDictionaries.append(kerning)
            leftClassDictionaries.append(leftClasses)
            rightClassDictionaries.append(rightClasses)
    return kerningDictionaries, leftClassDictionaries, rightClassDictionaries
2.088584
1.98911
1.05001
def _gatherLookupIndexes(gpos):
    """Gather a mapping of script to lookup indexes referenced by the
    kern feature for each script.

    Returns a dictionary of this structure:
        {
            "latn" : [0],
            "DFLT" : [0]
        }
    """
    # gather the indexes of the kern features
    kernFeatureIndexes = [index for index, featureRecord in enumerate(gpos.FeatureList.FeatureRecord) if featureRecord.FeatureTag == "kern"]
    # find scripts and languages that have kern features
    scriptKernFeatureIndexes = {}
    for scriptRecord in gpos.ScriptList.ScriptRecord:
        script = scriptRecord.ScriptTag
        thisScriptKernFeatureIndexes = []
        defaultLangSysRecord = scriptRecord.Script.DefaultLangSys
        if defaultLangSysRecord is not None:
            f = []
            for featureIndex in defaultLangSysRecord.FeatureIndex:
                if featureIndex not in kernFeatureIndexes:
                    continue
                f.append(featureIndex)
            if f:
                # None marks the default language system for this script.
                thisScriptKernFeatureIndexes.append((None, f))
        if scriptRecord.Script.LangSysRecord is not None:
            for langSysRecord in scriptRecord.Script.LangSysRecord:
                langSys = langSysRecord.LangSysTag
                f = []
                for featureIndex in langSysRecord.LangSys.FeatureIndex:
                    if featureIndex not in kernFeatureIndexes:
                        continue
                    f.append(featureIndex)
                if f:
                    thisScriptKernFeatureIndexes.append((langSys, f))
        scriptKernFeatureIndexes[script] = thisScriptKernFeatureIndexes
    # convert the feature indexes to lookup indexes
    scriptLookupIndexes = {}
    for script, featureDefinitions in scriptKernFeatureIndexes.items():
        # Preserve discovery order while de-duplicating lookup indexes.
        lookupIndexes = scriptLookupIndexes[script] = []
        for language, featureIndexes in featureDefinitions:
            for featureIndex in featureIndexes:
                featureRecord = gpos.FeatureList.FeatureRecord[featureIndex]
                for lookupIndex in featureRecord.Feature.LookupListIndex:
                    if lookupIndex not in lookupIndexes:
                        lookupIndexes.append(lookupIndex)
    # done
    return scriptLookupIndexes
2.299201
2.140805
1.073989
def _gatherKerningForLookup(gpos, lookupIndex):
    """Gather the kerning and class data for a particular lookup.

    Returns kerning, left classes, right classes — or None when the lookup
    is not pair positioning (type 2) or an extension (type 9) around it.

    The kerning dictionary is of this structure:
        {
            ("a", "a") : 10,
            ((1, 1, 3), "a") : -20
        }
    The class dictionaries have this structure:
        {
            (1, 1, 3) : ["x", "y", "z"]
        }
    Where the tuple means this:
        (lookup index, subtable index, class index)
    """
    allKerning = {}
    allLeftClasses = {}
    allRightClasses = {}
    lookup = gpos.LookupList.Lookup[lookupIndex]
    # only handle pair positioning and extension
    if lookup.LookupType not in (2, 9):
        return
    for subtableIndex, subtable in enumerate(lookup.SubTable):
        if lookup.LookupType == 2:
            format = subtable.Format
            lookupType = subtable.LookupType
            if (lookupType, format) == (2, 1):
                # Glyph-to-glyph pairs.
                kerning = _handleLookupType2Format1(subtable)
                allKerning.update(kerning)
            elif (lookupType, format) == (2, 2):
                # Class-to-class pairs.
                kerning, leftClasses, rightClasses = _handleLookupType2Format2(subtable, lookupIndex, subtableIndex)
                allKerning.update(kerning)
                allLeftClasses.update(leftClasses)
                allRightClasses.update(rightClasses)
        elif lookup.LookupType == 9:
            # Extension positioning: unwrap and handle the wrapped subtable.
            extSubtable = subtable.ExtSubTable
            format = extSubtable.Format
            lookupType = extSubtable.LookupType
            if (lookupType, format) == (2, 1):
                kerning = _handleLookupType2Format1(extSubtable)
                allKerning.update(kerning)
            elif (lookupType, format) == (2, 2):
                kerning, leftClasses, rightClasses = _handleLookupType2Format2(extSubtable, lookupIndex, subtableIndex)
                allKerning.update(kerning)
                allLeftClasses.update(leftClasses)
                allRightClasses.update(rightClasses)
    # done
    return allKerning, allLeftClasses, allRightClasses
1.86364
1.796071
1.037621
kerning = {} coverage = subtable.Coverage.glyphs valueFormat1 = subtable.ValueFormat1 pairSets = subtable.PairSet for index, leftGlyphName in enumerate(coverage): pairSet = pairSets[index] for pairValueRecord in pairSet.PairValueRecord: rightGlyphName = pairValueRecord.SecondGlyph if valueFormat1: value = pairValueRecord.Value1 else: value = pairValueRecord.Value2 if hasattr(value, "XAdvance"): value = value.XAdvance kerning[leftGlyphName, rightGlyphName] = value return kerning
def _handleLookupType2Format1(subtable)
Extract a kerning dictionary from a Lookup Type 2 Format 1.
3.189595
2.894891
1.101801
def _handleLookupType2Format2(subtable, lookupIndex, subtableIndex):
    """Extract kerning, left class and right class dictionaries from a
    Lookup Type 2 Format 2.
    """
    # extract the classes
    leftClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef1.classDefs, coverage=subtable.Coverage.glyphs)
    rightClasses = _extractFeatureClasses(lookupIndex=lookupIndex, subtableIndex=subtableIndex, classDefs=subtable.ClassDef2.classDefs)
    # extract the pairs
    kerning = {}
    useValue1 = subtable.ValueFormat1
    for leftIdx, class1Record in enumerate(subtable.Class1Record):
        leftClass = (lookupIndex, subtableIndex, leftIdx)
        for rightIdx, class2Record in enumerate(class1Record.Class2Record):
            rightClass = (lookupIndex, subtableIndex, rightIdx)
            value = class2Record.Value1 if useValue1 else class2Record.Value2
            # NOTE(review): unlike Format 1, a zero XAdvance keeps the
            # value-record object itself (behavior preserved from original).
            if hasattr(value, "XAdvance") and value.XAdvance != 0:
                value = value.XAdvance
            kerning[leftClass, rightClass] = value
    return kerning, leftClasses, rightClasses
2.92926
2.575825
1.137212
# work through the dictionaries backwards since # this uses an update to load the kerning. this # will ensure that the script order is honored. kerning = {} for dictionaryGroup in reversed(kerningDictionaries): for dictionary in dictionaryGroup: kerning.update(dictionary) # done. return kerning
def _mergeKerningDictionaries(kerningDictionaries)
Merge all of the kerning dictionaries found into one flat dictionary.
9.111169
9.093829
1.001907
toRemove = {} for classDictionaryGroup in classDictionaries: for classDictionary in classDictionaryGroup: for name, members in list(classDictionary.items()): if len(members) == 1: toRemove[name] = list(members)[0] del classDictionary[name] return toRemove
def _findSingleMemberGroups(classDictionaries)
Find all classes that have only one member.
2.892926
2.853015
1.013989
new = {} for (left, right), value in kerning.items(): left = leftGroups.get(left, left) right = rightGroups.get(right, right) new[left, right] = value return new
def _removeSingleMemberGroupReferences(kerning, leftGroups, rightGroups)
Translate group names into glyph names in pairs if the group only contains one glyph.
2.582055
2.355638
1.096117
# build a mapping of members to names memberTree = {} for classDictionaryGroup in classDictionaries: for classDictionary in classDictionaryGroup: for name, members in classDictionary.items(): if members not in memberTree: memberTree[members] = set() memberTree[members].add(name) # find members that have more than one name classes = {} rename = {} for members, names in memberTree.items(): name = names.pop() if len(names) > 0: for otherName in names: rename[otherName] = name classes[name] = members return classes, rename
def _mergeClasses(classDictionaries)
Look for classes that have the exact same list of members and flag them for removal. This returns left classes, left rename map, right classes and right rename map. The classes have the standard class structure. The rename maps have this structure: { (1, 1, 3) : (2, 3, 4), old name : new name } Where the key is the class that should be preserved and the value is a list of classes that should be removed.
2.904855
2.720909
1.067605
groups = {} for groupName, glyphList in classes.items(): groupName = classRename.get(groupName, groupName) # if the glyph list has only one member, # the glyph name will be used in the pairs. # no group is needed. if len(glyphList) == 1: continue groups[groupName] = glyphList return groups
def _setGroupNames(classes, classRename)
Set the final names into the groups.
4.165493
4.039374
1.031222
glyphToClass = {} for className, glyphList in classes.items(): for glyphName in glyphList: if glyphName not in glyphToClass: glyphToClass[glyphName] = set() glyphToClass[glyphName].add(className) for glyphName, groupList in glyphToClass.items(): if len(groupList) > 1: raise ExtractorError("Kerning classes are in an conflicting state.")
def _validateClasses(classes)
Check to make sure that a glyph is not part of more than one class. If this is found, an ExtractorError is raised.
3.391572
2.690989
1.260344
renamedKerning = {} for (left, right), value in kerning.items(): left = leftRename.get(left, left) right = rightRename.get(right, right) renamedKerning[left, right] = value return renamedKerning
def _replaceRenamedPairMembers(kerning, leftRename, rightRename)
Populate the renamed pair members into the kerning.
2.019689
2.094455
0.964303
renameMap = {} for classID, glyphList in classes.items(): if len(glyphList) == 0: groupName = "%s_empty_lu.%d_st.%d_cl.%d" % (prefix, classID[0], classID[1], classID[2]) elif len(glyphList) == 1: groupName = list(glyphList)[0] else: glyphList = list(sorted(glyphList)) groupName = prefix + glyphList[0] renameMap[classID] = groupName return renameMap
def _renameClasses(classes, prefix)
Replace class IDs with nice strings.
3.387585
3.407791
0.994071
# gather the class members classDict = {} for glyphName, classIndex in classDefs.items(): if classIndex not in classDict: classDict[classIndex] = set() classDict[classIndex].add(glyphName) # specially handle class index 0 revisedClass0 = set() if coverage is not None and 0 in classDict: for glyphName in classDict[0]: if glyphName in coverage: revisedClass0.add(glyphName) elif coverage is not None and 0 not in classDict: revisedClass0 = set(coverage) for glyphList in classDict.values(): revisedClass0 = revisedClass0 - glyphList classDict[0] = revisedClass0 # flip the class map around classes = {} for classIndex, glyphList in classDict.items(): classes[lookupIndex, subtableIndex, classIndex] = frozenset(glyphList) return classes
def _extractFeatureClasses(lookupIndex, subtableIndex, classDefs, coverage=None)
Extract classes for a specific lookup in a specific subtable. This is relatively straightforward, except for class 0 interpretation. Some fonts don't have class 0. Some fonts have a list of class members that are clearly not all to be used in kerning pairs. In the case of a missing class 0, the coverage is used as a basis for the class and glyph names used in classed 1+ are filtered out. In the case of class 0 having glyph names that are not part of the kerning pairs, the coverage is used to filter out the unnecessary glyph names.
2.86402
2.579785
1.110178
from .users import User return session.fetch_items("polls.getVoters", User._get_users, count=100, owner_id=owner_id, poll_id=poll_id, answer_ids=answer_id)
def _get_voters(cls, session, owner_id, poll_id, answer_id)
https://vk.com/dev/polls.getVoters
6.450163
3.762515
1.714322
assert name, 'name is required' assert self.can_include if name in self.includes: raise ThriftCompilerError( 'Cannot include module "%s" as "%s" in "%s". ' 'The name is already taken.' % (module_spec.name, name, self.path) ) self.includes[name] = module_spec self.scope.add_include(name, module_spec.scope, module_spec.surface)
def add_include(self, name, module_spec)
Adds a module as an included module. :param name: Name under which the included module should be exposed in the current module. :param module_spec: ModuleSpec of the included module.
4.144594
4.265421
0.971673
if self.linked: return self self.linked = True included_modules = [] # Link includes for include in self.includes.values(): included_modules.append(include.link().surface) self.scope.add_surface('__includes__', tuple(included_modules)) self.scope.add_surface('__thrift_source__', self.thrift_source) # Link self for linker in LINKERS: linker(self.scope).link() self.scope.add_surface('loads', Deserializer(self.protocol)) self.scope.add_surface('dumps', Serializer(self.protocol)) return self
def link(self)
Link all the types in this module and all included modules.
4.873939
4.145293
1.175777
assert name if path: path = os.path.abspath(path) if path in self._module_specs: return self._module_specs[path] module_spec = ModuleSpec(name, self.protocol, path, contents) if path: self._module_specs[path] = module_spec program = self.parser.parse(contents) header_processor = HeaderProcessor(self, module_spec, self.include_as) for header in program.headers: header.apply(header_processor) generator = Generator(module_spec.scope, strict=self.strict) for definition in program.definitions: generator.process(definition) return module_spec
def compile(self, name, contents, path=None)
Compile the given Thrift document into a Python module. The generated module contains, .. py:attribute:: __services__ A collection of generated classes for all services defined in the thrift file. .. versionchanged:: 1.0 Renamed from ``services`` to ``__services__``. .. py:attribute:: __types__ A collection of generated types for all types defined in the thrift file. .. versionchanged:: 1.0 Renamed from ``types`` to ``__types__``. .. py:attribute:: __includes__ A collection of modules included by this module. .. versionadded:: 1.0 .. py:attribute:: __constants__ A mapping of constant name to value for all constants defined in the thrift file. .. versionchanged:: 1.0 Renamed from ``constants`` to ``__constants__``. .. py:attribute:: __thrift_source__ Contents of the .thrift file from which this module was compiled. .. versionadded:: 1.1 .. py:function:: dumps(obj) Serializes the given object using the protocol the compiler was instantiated with. .. py:function:: loads(cls, payload) Deserializes an object of type ``cls`` from ``payload`` using the protocol the compiler was instantiated with. .. py:function:: dumps.message(obj, seqid=0) Serializes the given request or response into a :py:class:`~thriftrw.wire.Message` using the protocol that the compiler was instantiated with. See :ref:`calling-apache-thrift`. .. versionadded:: 1.0 .. py:function:: loads.message(service, payload) Deserializes a :py:class:`~thriftrw.wire.Message` from ``payload`` using the protocol the compiler was instantiated with. A request or response of a method defined in the given service is parsed in the message body. See :ref:`calling-apache-thrift`. .. versionadded:: 1.0 And one class each for every struct, union, exception, enum, and service defined in the IDL. Service classes have references to :py:class:`thriftrw.spec.ServiceFunction` objects for each method defined in the service. :param str name: Name of the Thrift document. This will be the name of the generated module. 
:param str contents: Thrift document to compile :param str path: Path to the Thrift file being compiled. If not specified, imports from within the Thrift file will be disallowed. :returns: ModuleSpec of the generated module.
3.897827
4.029267
0.967379
r'0x[0-9A-Fa-f]+' t.value = int(t.value, 16) t.type = 'INTCONSTANT' return t
def t_HEXCONSTANT(self, t)
r'0x[0-9A-Fa-f]+
2.647985
2.581687
1.02568
r'(\"([^\\\n]|(\\.))*?\")|\'([^\\\n]|(\\.))*?\'' s = t.value[1:-1] maps = { 't': '\t', 'r': '\r', 'n': '\n', '\\': '\\', '\'': '\'', '"': '\"' } i = 0 length = len(s) val = '' while i < length: if s[i] == '\\': i += 1 if s[i] in maps: val += maps[s[i]] else: msg = 'Cannot escape character: %s' % s[i] raise ThriftParserError(msg) else: val += s[i] i += 1 t.value = val return t
def t_LITERAL(self, t)
r'(\"([^\\\n]|(\\.))*?\")|\'([^\\\n]|(\\.))*?\
2.308091
2.251129
1.025303
r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*' if t.value in THRIFT_KEYWORDS: # Not an identifier after all. t.type = t.value.upper() return t
def t_IDENTIFIER(self, t)
r'[a-zA-Z_](\.[a-zA-Z_0-9]|[a-zA-Z_0-9])*
4.686517
3.645486
1.285567
# input(..) doesn't reset the lineno. We have to do that manually. self._lexer.lineno = 1 return self._lexer.input(data)
def input(self, data)
Reset the lexer and feed in new input. :param data: String of input data.
11.368528
12.944462
0.878254
photo = cls() photo.id = photo_json.get('id') photo.album_id = photo_json.get('album_id') photo.owner_id = photo_json.get('owner_id') photo.user_id = photo_json.get('user_id') photo.text = photo_json.get('text') photo.type = "photo" photo.date = photo_json.get('date') photo.photo_75 = photo_json.get('photo_75') photo.photo_130 = photo_json.get('photo_130') photo.photo_604 = photo_json.get('photo_604') photo.photo_807 = photo_json.get('photo_807') photo.photo_1280 = photo_json.get('photo_1280') photo.photo_2560 = photo_json.get('photo_2560') photo._session = session return photo
def from_json(cls, session, photo_json)
https://vk.com/dev/objects/photo
1.467706
1.369322
1.071849
response = session.fetch_items("photos.getAll", Photo.from_json, count=200, owner_id=user_or_group_id) return response
def _get_photos(session, user_or_group_id)
https://vk.com/dev/photos.getAll
8.17075
4.981285
1.64029
group_id = abs(group_id) response = session.fetch("photos.getOwnerCoverPhotoUploadServer", group_id=group_id, crop_x=crop_x, crop_y=crop_y, crop_x2=crop_x2, crop_y2=crop_y2) return response['upload_url']
def _get_owner_cover_photo_upload_server(session, group_id, crop_x=0, crop_y=0, crop_x2=795, crop_y2=200)
https://vk.com/dev/photos.getOwnerCoverPhotoUploadServer
2.303063
1.945214
1.183964
response = session.fetch('photos.saveOwnerCoverPhoto', hash=hash, photo=photo) return response
def _save_owner_cover_photo(session, hash, photo)
https://vk.com/dev/photos.saveOwnerCoverPhoto
6.038845
3.517531
1.716785
if group_id < 0: group_id = abs(group_id) response = session.fetch("photos.saveWallPhoto", photo=photo, server=server, hash=hash, user_id=user_id, group_id=group_id)[0] return response['id'], response['owner_id']
def _get_save_wall_photo(session, photo, server, hash, user_id=None, group_id=None)
https://vk.com/dev/photos.saveWallPhoto
3.242333
2.765653
1.172357
response = session.fetch("photos.saveMessagesPhoto", photo=photo, server=server, hash=hash)[0] return response['id'], response['owner_id']
def _get_save_messages_photo(session, photo, server, hash)
https://vk.com/dev/photos.saveMessagesPhoto
4.877036
3.38978
1.438747
response = self._session.fetch("users.get", user_ids=self.id, fields="city")[0] if response.get('city'): return City.from_json(self._session, response.get('city'))
def get_city(self)
:return: City or None
5.63535
4.705279
1.197665
response = self._session.fetch("users.get", user_ids=self.id, fields="country")[0] if response.get('country'): return Country.from_json(self._session, response.get('country'))
def get_country(self)
:return: Country or None
5.265833
4.628783
1.137628
response = self._session.fetch_items("users.getFollowers", self.from_json, self._session, count=1000, user_id=self.id, fields=self.USER_FIELDS) return response
def get_followers(self)
https://vk.com/dev/users.getFollowers
8.531039
6.172814
1.382034
user_json_items = session.fetch('users.get', user_ids=slug_or_user_id, fields=User.USER_FIELDS) return User.from_json(session, user_json_items[0])
def _get_user(session, slug_or_user_id)
:param slug_or_user_id: str or int :return: User
5.229066
5.241331
0.99766
# pretend we're measuring a noisy resonance at zero y = 1.0 / (1.0 + 1j*(n_x.get_value()-0.002)*1000) + _n.random.rand()*0.1 # and that it takes time to do so _t.sleep(0.1) # return mag phase return abs(y), _n.angle(y, True)
def get_data()
Currently pretends to talk to an instrument and get back the magnitud and phase of the measurement.
12.475483
10.810952
1.153967
print() for key in list(self.keys()): print(key,'=',self[key]) print()
def List(self)
Lists the keys and values.
6.154798
4.134761
1.48855
if not value == None: self.prefs[key] = value else: self.prefs.pop(key) self.Dump()
def Set(self, key, value)
Sets the key-value pair and dumps to the preferences file.
7.064232
5.189782
1.361181
full_path = _os.path.join(self.path_home, path) # only make it if it doesn't exist! if not _os.path.exists(full_path): _os.makedirs(full_path)
def MakeDir(self, path="temp")
Creates a directory of the specified path in the .spinmob directory.
3.839769
3.694427
1.039341
full_path = _os.path.join(self.path_home, path) # only if the path exists! if _os.path.exists(full_path) and _os.path.isdir(full_path): return _os.listdir(full_path) else: return []
def ListDir(self, path="temp")
Returns a list of files in the specified path (directory), or an empty list if the directory doesn't exist.
2.953531
2.964351
0.99635
if isinstance(fields, tuple) or isinstance(fields, list): return ','.join(fields) return fields
def _convert_list2str(self, fields)
:param fields: ('bdate', 'domain') :return: 'bdate,domain'
3.273785
2.896213
1.130368
'''start : header definition''' p[0] = ast.Program(headers=p[1], definitions=p[2])
def p_start(self, p)
start : header definition
8.784911
5.887042
1.492245
'''include : INCLUDE IDENTIFIER LITERAL | INCLUDE LITERAL''' if len(p) == 4: p[0] = ast.Include(name=p[2], path=p[3], lineno=p.lineno(1)) else: p[0] = ast.Include(name=None, path=p[2], lineno=p.lineno(1))
def p_include(self, p)
include : INCLUDE IDENTIFIER LITERAL | INCLUDE LITERAL
2.60626
2.151072
1.21161
'''namespace : NAMESPACE namespace_scope IDENTIFIER''' p[0] = ast.Namespace(scope=p[2], name=p[3], lineno=p.lineno(1))
def p_namespace(self, p)
namespace : NAMESPACE namespace_scope IDENTIFIER
5.189385
3.320711
1.562733
'''const : CONST field_type IDENTIFIER '=' const_value | CONST field_type IDENTIFIER '=' const_value sep''' p[0] = ast.Const( name=p[3], value_type=p[2], value=p[5], lineno=p.lineno(3), )
def p_const(self, p)
const : CONST field_type IDENTIFIER '=' const_value | CONST field_type IDENTIFIER '=' const_value sep
4.585456
3.20861
1.42911
'''const_value_primitive : INTCONSTANT | DUBCONSTANT | LITERAL | const_bool''' p[0] = ast.ConstPrimitiveValue(p[1], lineno=p.lineno(1))
def p_const_value_primitive(self, p)
const_value_primitive : INTCONSTANT | DUBCONSTANT | LITERAL | const_bool
8.393752
3.04653
2.755185
'''const_list : '[' const_list_seq ']' ''' p[0] = ast.ConstList(list(p[2]), p.lineno(1))
def p_const_list(self, p)
const_list : '[' const_list_seq ']'
5.346096
3.178077
1.682179
'''const_map : '{' const_map_seq '}' ''' p[0] = ast.ConstMap(dict(p[2]), p.lineno(1))
def p_const_map(self, p)
const_map : '{' const_map_seq '}'
5.122349
3.154327
1.623912
'''const_ref : IDENTIFIER''' p[0] = ast.ConstReference(p[1], lineno=p.lineno(1))
def p_const_ref(self, p)
const_ref : IDENTIFIER
4.982605
3.786461
1.3159
'''typedef : TYPEDEF field_type IDENTIFIER annotations''' p[0] = ast.Typedef( name=p[3], target_type=p[2], annotations=p[4], lineno=p.lineno(3) )
def p_typedef(self, p)
typedef : TYPEDEF field_type IDENTIFIER annotations
5.806858
3.762863
1.543202
def p_enum(self, p): # noqa '''enum : ENUM IDENTIFIER '{' enum_seq '}' annotations''' p[0] = ast.Enum( name=p[2], items=p[4], annotations=p[6], lineno=p.lineno(2) )
enum : ENUM IDENTIFIER '{' enum_seq '}' annotations
null
null
null
'''enum_item : IDENTIFIER '=' INTCONSTANT annotations | IDENTIFIER annotations''' if len(p) == 5: p[0] = ast.EnumItem( name=p[1], value=p[3], annotations=p[4], lineno=p.lineno(1) ) else: p[0] = ast.EnumItem( name=p[1], value=None, annotations=p[2], lineno=p.lineno(1) )
def p_enum_item(self, p)
enum_item : IDENTIFIER '=' INTCONSTANT annotations | IDENTIFIER annotations
2.614415
1.919023
1.362368
'''struct : STRUCT IDENTIFIER '{' field_seq '}' annotations''' p[0] = ast.Struct( name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2) )
def p_struct(self, p)
struct : STRUCT IDENTIFIER '{' field_seq '}' annotations
4.708426
2.823582
1.667536
'''union : UNION IDENTIFIER '{' field_seq '}' annotations''' p[0] = ast.Union( name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2) )
def p_union(self, p)
union : UNION IDENTIFIER '{' field_seq '}' annotations
5.285753
2.781934
1.900028
'''exception : EXCEPTION IDENTIFIER '{' field_seq '}' annotations''' p[0] = ast.Exc( name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2) )
def p_exception(self, p)
exception : EXCEPTION IDENTIFIER '{' field_seq '}' annotations
6.43138
3.410146
1.885954
'''service : SERVICE IDENTIFIER '{' function_seq '}' annotations | SERVICE IDENTIFIER EXTENDS IDENTIFIER \ '{' function_seq '}' annotations ''' if len(p) == 7: p[0] = ast.Service( name=p[2], functions=p[4], parent=None, annotations=p[6], lineno=p.lineno(2), ) else: p[0] = ast.Service( name=p[2], functions=p[6], parent=ast.ServiceReference(p[4], p.lineno(4)), annotations=p[8], lineno=p.lineno(2), )
def p_service(self, p)
service : SERVICE IDENTIFIER '{' function_seq '}' annotations | SERVICE IDENTIFIER EXTENDS IDENTIFIER \ '{' function_seq '}' annotations
2.818029
2.016153
1.397726
'''function : oneway function_type IDENTIFIER '(' field_seq ')' \ throws annotations ''' p[0] = ast.Function( name=p[3], parameters=p[5], return_type=p[2], exceptions=p[7], oneway=p[1], annotations=p[8], lineno=p.lineno(3), )
def p_function(self, p)
function : oneway function_type IDENTIFIER '(' field_seq ')' \ throws annotations
5.842584
2.840426
2.056939
'''field : field_id field_req field_type IDENTIFIER annotations | field_id field_req field_type IDENTIFIER '=' const_value \ annotations''' if len(p) == 8: default = p[6] annotations = p[7] else: default = None annotations = p[5] p[0] = ast.Field( id=p[1], name=p[4], field_type=p[3], requiredness=p[2], default=default, annotations=annotations, lineno=p.lineno(4), )
def p_field(self, p)
field : field_id field_req field_type IDENTIFIER annotations | field_id field_req field_type IDENTIFIER '=' const_value \ annotations
4.058283
2.664645
1.523011
'''field_id : INTCONSTANT ':' | ''' if len(p) == 3: if p[1] == 0: # Prevent users from ever using field ID 0. It's reserved for # internal use only. raise ThriftParserError( 'Line %d: Field ID 0 is reserved for internal use.' % p.lineno(1) ) p[0] = p[1] else: p[0] = None
def p_field_id(self, p)
field_id : INTCONSTANT ':' |
5.591487
4.330026
1.291329
'''ref_type : IDENTIFIER''' p[0] = ast.DefinedType(p[1], lineno=p.lineno(1))
def p_ref_type(self, p)
ref_type : IDENTIFIER
6.406199
4.574843
1.40031
name = 'byte' p[0] = ast.PrimitiveType(name, p[2])
def p_base_type(self, p): # noqa '''base_type : BOOL annotations | BYTE annotations | I8 annotations | I16 annotations | I32 annotations | I64 annotations | DOUBLE annotations | STRING annotations | BINARY annotations''' name = p[1] if name == 'i8'
base_type : BOOL annotations | BYTE annotations | I8 annotations | I16 annotations | I32 annotations | I64 annotations | DOUBLE annotations | STRING annotations | BINARY annotations
9.440545
8.119414
1.162713
'''map_type : MAP '<' field_type ',' field_type '>' annotations''' p[0] = ast.MapType(key_type=p[3], value_type=p[5], annotations=p[7])
def p_map_type(self, p)
map_type : MAP '<' field_type ',' field_type '>' annotations
4.301949
2.249452
1.912443
'''list_type : LIST '<' field_type '>' annotations''' p[0] = ast.ListType(value_type=p[3], annotations=p[5])
def p_list_type(self, p)
list_type : LIST '<' field_type '>' annotations
6.608703
3.11634
2.120662
'''set_type : SET '<' field_type '>' annotations''' p[0] = ast.SetType(value_type=p[3], annotations=p[5])
def p_set_type(self, p)
set_type : SET '<' field_type '>' annotations
6.503874
3.123755
2.082069
'''annotation : IDENTIFIER '=' LITERAL | IDENTIFIER''' if len(p) == 4: p[0] = ast.Annotation(p[1], p[3], lineno=p.lineno(1)) else: p[0] = ast.Annotation(p[1], True, lineno=p.lineno(1))
def p_annotation(self, p)
annotation : IDENTIFIER '=' LITERAL | IDENTIFIER
3.101187
2.572259
1.205628
# This basically says: # # - When you reach the end of the list, construct and return an empty # deque. # - Otherwise, prepend to start of what you got from the parser. # # So this ends up constructing an in-order list. if len(p) == 4: p[3].appendleft(p[1]) p[0] = p[3] elif len(p) == 3: p[2].appendleft(p[1]) p[0] = p[2] elif len(p) == 1: p[0] = deque() else: raise ThriftParserError( 'Wrong number of tokens received for expression at line %d' % p.lineno(1) )
def _parse_seq(self, p)
Helper to parse sequence rules. Sequence rules are in the form:: foo : foo_item sep foo | foo_item foo | This function builds a deque of the items in-order. If the number of tokens doesn't match, an exception is raised.
4.392564
4.136519
1.061899
return self._parser.parse(input, lexer=self._lexer, **kwargs)
def parse(self, input, **kwargs)
Parse the given input. :param input: String containing the text to be parsed. :raises thriftrw.errors.ThriftParserError: For parsing errors.
5.266526
8.452838
0.623048
return session.fetch_items("wall.getComments", Comment.from_json, count=100, owner_id=group_or_user_id, post_id=wall_id, need_likes=1)
def _get_comments(session, group_or_user_id, wall_id)
https://vk.com/dev/wall.getComments
4.672402
3.942225
1.185219
response = session.fetch("wall.getComments", count=100, owner_id=group_or_user_id, post_id=wall_id) return response.get('count')
def _get_comments_count(session, group_or_user_id, wall_id)
https://vk.com/dev/wall.getComments
3.504504
2.74298
1.277626
from .users import User return self._session.fetch_items('likes.getList', User._get_user, count=100, type='post', owner_id=self.from_id, item_id=self.id)
def get_likes(self)
https://vk.com/dev/likes.getList
9.259999
5.922198
1.563609
response = self._session.fetch('likes.getList', count=1, type='post', owner_id=self.from_id, item_id=self.id) likes_count = response.get('count') return likes_count
def get_likes_count(self)
https://vk.com/dev/likes.getList
4.837432
3.640089
1.328932
return self._session.fetch_items('wall.getReposts', self.from_json, count=1000, owner_id=self.from_id, post_id=self.id)
def get_reposts(self)
https://vk.com/dev/wall.getReposts
7.127427
4.919209
1.448897
response = session.fetch("wall.post", owner_id=owner_id, message=message, attachments=attachments, from_group=from_group) return response
def _wall_post(session, owner_id, message=None, attachments=None, from_group=True)
https://vk.com/dev/wall.post attachments: "photo100172_166443618,photo-1_265827614"
2.652681
2.267514
1.169863