| signature | body | docstring | id |
|---|---|---|---|
def parse_port_pin(name_str):
|
if len(name_str) < <NUM_LIT:3>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if name_str[<NUM_LIT:0>] != '<STR_LIT:P>':<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if name_str[<NUM_LIT:1>] < '<STR_LIT:A>' or name_str[<NUM_LIT:1>] > '<STR_LIT>':<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>port = ord(name_str[<NUM_LIT:1>]) - ord('<STR_LIT:A>')<EOL>pin_str = name_str[<NUM_LIT:2>:]<EOL>if not pin_str.isdigit():<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return (port, int(pin_str))<EOL>
|
Parses a string and returns a (port-num, pin-num) tuple.
|
f7029:m0
|
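The dataset masks literals as `<STR_LIT>`/`<NUM_LIT>` tokens, so the row above is not directly runnable. Below is a minimal de-masked sketch of what this parser plausibly looks like; the error messages and the upper port letter 'K' are assumptions, not recovered values:

```python
def parse_port_pin(name_str):
    """Parse a pin name like 'PA5' into a (port-num, pin-num) tuple."""
    if len(name_str) < 3:
        raise ValueError("expecting pin name to be at least 3 characters")
    if name_str[0] != 'P':
        raise ValueError("expecting pin name to start with P")
    if name_str[1] < 'A' or name_str[1] > 'K':  # upper bound 'K' is an assumption
        raise ValueError("expecting pin port to be between A and K")
    port = ord(name_str[1]) - ord('A')
    pin_str = name_str[2:]
    if not pin_str.isdigit():
        raise ValueError("expecting numeric pin number")
    return (port, int(pin_str))

assert parse_port_pin('PA5') == (0, 5)
```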
def ptr(self):
|
if self.fn_num is None:<EOL><INDENT>return self.func<EOL><DEDENT>return '<STR_LIT>'.format(self.func, self.fn_num)<EOL>
|
Returns the numbered function (e.g. USART6) for this AF.
|
f7029:c0:m2
|
def print(self):
|
cond_var = None<EOL>if self.supported:<EOL><INDENT>cond_var = conditional_var('<STR_LIT>'.format(self.func, self.fn_num))<EOL>print_conditional_if(cond_var)<EOL>print('<STR_LIT>', end='<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>print('<STR_LIT>', end='<STR_LIT>')<EOL><DEDENT>fn_num = self.fn_num<EOL>if fn_num is None:<EOL><INDENT>fn_num = <NUM_LIT:0><EOL><DEDENT>print('<STR_LIT>'.format(self.idx,<EOL>self.func, fn_num, self.pin_type, self.ptr(), self.af_str))<EOL>print_conditional_endif(cond_var)<EOL>
|
Prints the C representation of this AF.
|
f7029:c0:m4
|
def parse_port_pin(name_str):
|
if len(name_str) < <NUM_LIT:3>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if name_str[:<NUM_LIT:2>] != '<STR_LIT>':<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if not name_str[<NUM_LIT:2>:].isdigit():<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>port = int(int(name_str[<NUM_LIT:2>:]) / <NUM_LIT:8>)<EOL>gpio_bit = <NUM_LIT:1> << int(int(name_str[<NUM_LIT:2>:]) % <NUM_LIT:8>)<EOL>return (port, gpio_bit)<EOL>
|
Parses a string and returns a (port, gpio_bit) tuple.
|
f7037:m0
|
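This variant packs the pin into a (port, gpio_bit) pair instead of (port, pin). A hedged sketch, assuming a two-character 'GP' prefix (the prefix literal is masked in the row) and eight pins per port, as the /8 and %8 arithmetic implies:

```python
def parse_port_pin(name_str):
    """Parse a pin name like 'GP12' into a (port, gpio_bit) tuple."""
    if len(name_str) < 3:
        raise ValueError("expecting pin name to be at least 3 characters")
    if name_str[:2] != 'GP':  # the masked two-char prefix; 'GP' is an assumption
        raise ValueError("expecting pin name to start with GP")
    if not name_str[2:].isdigit():
        raise ValueError("expecting numeric pin number")
    pin = int(name_str[2:])
    return (pin // 8, 1 << (pin % 8))  # 8 pins per port, one bit per pin

assert parse_port_pin('GP12') == (1, 0b10000)
```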
def parse_port_pin(name_str):
|
if len(name_str) < <NUM_LIT:4>:<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if name_str[<NUM_LIT:0>:<NUM_LIT:2>] != '<STR_LIT>':<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>if name_str[<NUM_LIT:2>] not in ('<STR_LIT:A>', '<STR_LIT:B>', '<STR_LIT:C>', '<STR_LIT:D>', '<STR_LIT:E>', '<STR_LIT>'):<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>port = ord(name_str[<NUM_LIT:2>]) - ord('<STR_LIT:A>')<EOL>pin_str = name_str[<NUM_LIT:3>:].split('<STR_LIT:/>')[<NUM_LIT:0>]<EOL>if not pin_str.isdigit():<EOL><INDENT>raise ValueError("<STR_LIT>")<EOL><DEDENT>return (port, int(pin_str))<EOL>
|
Parses a string and returns a (port-num, pin-num) tuple.
|
f7057:m0
|
def ptr(self):
|
if self.fn_num is None:<EOL><INDENT>return self.func<EOL><DEDENT>return '<STR_LIT>'.format(self.func, self.fn_num)<EOL>
|
Returns the numbered function (e.g. USART6) for this AF.
|
f7057:c0:m2
|
def print(self):
|
if self.supported:<EOL><INDENT>print('<STR_LIT>', end='<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>print('<STR_LIT>', end='<STR_LIT>')<EOL><DEDENT>fn_num = self.fn_num<EOL>if fn_num is None:<EOL><INDENT>fn_num = <NUM_LIT:0><EOL><DEDENT>print('<STR_LIT>'.format(self.idx,<EOL>self.func, fn_num, self.pin_type, self.ptr(), self.af_str))<EOL>
|
Prints the C representation of this AF.
|
f7057:c0:m4
|
def get_product_metadata_path(product_name):
|
string_date = product_name.split('<STR_LIT:_>')[-<NUM_LIT:1>]<EOL>date = datetime.datetime.strptime(string_date, '<STR_LIT>')<EOL>path = '<STR_LIT>'.format(date.year, date.month, date.day, product_name)<EOL>return {<EOL>product_name: {<EOL>'<STR_LIT>': '<STR_LIT>'.format(path, '<STR_LIT>'),<EOL>'<STR_LIT>': get_tile_metadata_path('<STR_LIT>'.format(path, '<STR_LIT>'))<EOL>}<EOL>}<EOL>
|
Gets a single product's metadata
|
f7078:m1
|
def get_products_metadata_path(year, month, day):
|
products = {}<EOL>path = '<STR_LIT>'.format(year, month, day)<EOL>for key in bucket.objects.filter(Prefix=path):<EOL><INDENT>product_path = key.key.replace(path, '<STR_LIT>').split('<STR_LIT:/>')<EOL>name = product_path[<NUM_LIT:0>]<EOL>if name not in products:<EOL><INDENT>products[name] = {}<EOL><DEDENT>if product_path[<NUM_LIT:1>] == '<STR_LIT>':<EOL><INDENT>products[name]['<STR_LIT>'] = key.key<EOL><DEDENT>if product_path[<NUM_LIT:1>] == '<STR_LIT>':<EOL><INDENT>products[name]['<STR_LIT>'] = get_tile_metadata_path(key.key)<EOL><DEDENT><DEDENT>return products<EOL>
|
Get paths to multiple products' metadata
|
f7078:m2
|
def product_metadata(product, dst_folder, counter=None, writers=[file_writer], geometry_check=None):
|
if not counter:<EOL><INDENT>counter = {<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': []<EOL>}<EOL><DEDENT>s3_url = '<STR_LIT>'<EOL>product_meta_link = '<STR_LIT>'.format(s3_url, product['<STR_LIT>'])<EOL>product_info = requests.get(product_meta_link, stream=True)<EOL>product_metadata = metadata_to_dict(product_info.raw)<EOL>product_metadata['<STR_LIT>'] = product_meta_link<EOL>counter['<STR_LIT>'] += <NUM_LIT:1><EOL>for tile in product['<STR_LIT>']:<EOL><INDENT>tile_info = requests.get('<STR_LIT>'.format(s3_url, tile))<EOL>try:<EOL><INDENT>metadata = tile_metadata(tile_info.json(), copy(product_metadata), geometry_check)<EOL>for w in writers:<EOL><INDENT>w(dst_folder, metadata)<EOL><DEDENT>logger.info('<STR_LIT>' % metadata['<STR_LIT>'])<EOL>counter['<STR_LIT>'] += <NUM_LIT:1><EOL><DEDENT>except JSONDecodeError:<EOL><INDENT>logger.warning('<STR_LIT>' % tile)<EOL>counter['<STR_LIT>'] += <NUM_LIT:1><EOL>counter['<STR_LIT>'].append(tile)<EOL><DEDENT><DEDENT>return counter<EOL>
|
Extract metadata for a specific product
|
f7079:m3
|
def daily_metadata(year, month, day, dst_folder, writers=[file_writer], geometry_check=None,<EOL>num_worker_threads=<NUM_LIT:1>):
|
threaded = False<EOL>counter = {<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': []<EOL>}<EOL>if num_worker_threads > <NUM_LIT:1>:<EOL><INDENT>threaded = True<EOL>queue = Queue()<EOL><DEDENT>year_dir = os.path.join(dst_folder, str(year))<EOL>month_dir = os.path.join(year_dir, str(month))<EOL>day_dir = os.path.join(month_dir, str(day))<EOL>product_list = get_products_metadata_path(year, month, day)<EOL>logger.info('<STR_LIT>' % (len(list(iterkeys(product_list))),<EOL>year, month, day))<EOL>for name, product in iteritems(product_list):<EOL><INDENT>product_dir = os.path.join(day_dir, name)<EOL>if threaded:<EOL><INDENT>queue.put([product, product_dir, counter, writers, geometry_check])<EOL><DEDENT>else:<EOL><INDENT>counter = product_metadata(product, product_dir, counter, writers, geometry_check)<EOL><DEDENT><DEDENT>if threaded:<EOL><INDENT>def worker():<EOL><INDENT>while not queue.empty():<EOL><INDENT>args = queue.get()<EOL>try:<EOL><INDENT>product_metadata(*args)<EOL><DEDENT>except Exception:<EOL><INDENT>exc = sys.exc_info()<EOL>logger.error('<STR_LIT>' % (threading.current_thread().name,<EOL>exc[<NUM_LIT:1>].__str__()))<EOL>args[<NUM_LIT:2>]['<STR_LIT>'] += <NUM_LIT:1><EOL><DEDENT>queue.task_done()<EOL><DEDENT><DEDENT>threads = []<EOL>for i in range(num_worker_threads):<EOL><INDENT>t = threading.Thread(target=worker)<EOL>t.start()<EOL>threads.append(t)<EOL><DEDENT>queue.join()<EOL><DEDENT>return counter<EOL>
|
Extract metadata for all products on a specific date
|
f7079:m5
|
def range_metadata(start, end, dst_folder, num_worker_threads=<NUM_LIT:0>, writers=[file_writer], geometry_check=None):
|
assert isinstance(start, date)<EOL>assert isinstance(end, date)<EOL>delta = end - start<EOL>dates = []<EOL>for i in range(delta.days + <NUM_LIT:1>):<EOL><INDENT>dates.append(start + timedelta(days=i))<EOL><DEDENT>days = len(dates)<EOL>total_counter = {<EOL>'<STR_LIT>': days,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': <NUM_LIT:0>,<EOL>'<STR_LIT>': []<EOL>}<EOL>def update_counter(counter):<EOL><INDENT>for key in iterkeys(total_counter):<EOL><INDENT>if key in counter:<EOL><INDENT>total_counter[key] += counter[key]<EOL><DEDENT><DEDENT><DEDENT>for d in dates:<EOL><INDENT>logger.info('<STR_LIT>'.format(d.year, d.month, d.day))<EOL>update_counter(daily_metadata(d.year, d.month, d.day, dst_folder, writers, geometry_check,<EOL>num_worker_threads))<EOL><DEDENT>return total_counter<EOL>
|
Extract metadata for all products in a date range
|
f7079:m6
|
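The core of range_metadata is expanding an inclusive [start, end] window into individual days before delegating each day to daily_metadata. That expansion step in isolation, runnable as-is:

```python
from datetime import date, timedelta

def dates_between(start, end):
    """Inclusive list of days between two dates, mirroring the loop above."""
    assert isinstance(start, date) and isinstance(end, date)
    delta = end - start
    return [start + timedelta(days=i) for i in range(delta.days + 1)]

assert dates_between(date(2016, 1, 30), date(2016, 2, 1)) == [
    date(2016, 1, 30), date(2016, 1, 31), date(2016, 2, 1)]
```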
def epsg_code(geojson):
|
if isinstance(geojson, dict):<EOL><INDENT>if '<STR_LIT>' in geojson:<EOL><INDENT>urn = geojson['<STR_LIT>']['<STR_LIT>']['<STR_LIT:name>'].split('<STR_LIT::>')<EOL>if '<STR_LIT>' in urn:<EOL><INDENT>try:<EOL><INDENT>return int(urn[-<NUM_LIT:1>])<EOL><DEDENT>except (TypeError, ValueError):<EOL><INDENT>return None<EOL><DEDENT><DEDENT><DEDENT><DEDENT>return None<EOL>
|
Get the EPSG code from the crs member of the geojson
|
f7081:m0
|
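With the masked keys filled in, under the assumption that they follow the pre-RFC 7946 GeoJSON crs convention ('crs', 'properties', 'name'), the function reads an EPSG code out of a URN:

```python
def epsg_code(geojson):
    """Return the EPSG code from a geojson crs member, or None."""
    if isinstance(geojson, dict):
        if 'crs' in geojson:
            # e.g. 'urn:ogc:def:crs:EPSG:8.8.1:32633' -> 32633
            urn = geojson['crs']['properties']['name'].split(':')
            if 'EPSG' in urn:
                try:
                    return int(urn[-1])
                except (TypeError, ValueError):
                    return None
    return None

crs = {'crs': {'properties': {'name': 'urn:ogc:def:crs:EPSG:8.8.1:32633'}}}
assert epsg_code(crs) == 32633
assert epsg_code({}) is None
```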
def convert_coordinates(coords, origin, wgs84, wrapped):
|
if isinstance(coords, list) or isinstance(coords, tuple):<EOL><INDENT>try:<EOL><INDENT>if isinstance(coords[<NUM_LIT:0>], list) or isinstance(coords[<NUM_LIT:0>], tuple):<EOL><INDENT>return [convert_coordinates(list(c), origin, wgs84, wrapped) for c in coords]<EOL><DEDENT>elif isinstance(coords[<NUM_LIT:0>], float):<EOL><INDENT>c = list(transform(origin, wgs84, *coords))<EOL>if wrapped and c[<NUM_LIT:0>] < -<NUM_LIT>:<EOL><INDENT>c[<NUM_LIT:0>] = c[<NUM_LIT:0>] + <NUM_LIT><EOL><DEDENT>return c<EOL><DEDENT><DEDENT>except IndexError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return None<EOL>
|
Convert coordinates from one CRS to another
|
f7081:m2
|
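The row recurses through arbitrarily nested coordinate arrays and reprojects the innermost [x, y] pairs with pyproj's transform(origin, wgs84, *coords). A self-contained sketch that injects the transform as a callable so it runs without pyproj; the wrap thresholds (-170 and +360) stand in for the masked numeric literals:

```python
def convert_coordinates(coords, transform, wrapped=False):
    """Recursively convert nested coordinate lists with `transform`."""
    if isinstance(coords, (list, tuple)):
        try:
            if isinstance(coords[0], (list, tuple)):
                return [convert_coordinates(list(c), transform, wrapped) for c in coords]
            elif isinstance(coords[0], float):
                c = list(transform(*coords))
                if wrapped and c[0] < -170:  # assumed values for the masked literals
                    c[0] += 360
                return c
        except IndexError:
            pass
    return None

identity = lambda x, y: (x, y)
ring = [[[1.0, 2.0], [3.0, 4.0]]]
assert convert_coordinates(ring, identity) == [[[1.0, 2.0], [3.0, 4.0]]]
```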
def to_latlon(geojson, origin_espg=None):
|
if isinstance(geojson, dict):<EOL><INDENT>if origin_espg:<EOL><INDENT>code = origin_espg<EOL><DEDENT>else:<EOL><INDENT>code = epsg_code(geojson)<EOL><DEDENT>if code:<EOL><INDENT>origin = Proj(init='<STR_LIT>' % code)<EOL>wgs84 = Proj(init='<STR_LIT>')<EOL>wrapped = test_wrap_coordinates(geojson['<STR_LIT>'], origin, wgs84)<EOL>new_coords = convert_coordinates(geojson['<STR_LIT>'], origin, wgs84, wrapped)<EOL>if new_coords:<EOL><INDENT>geojson['<STR_LIT>'] = new_coords<EOL><DEDENT>try:<EOL><INDENT>del geojson['<STR_LIT>']<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>return geojson<EOL>
|
Convert a given geojson to WGS84. The original EPSG code must be included
inside the crs tag of the geojson
|
f7081:m3
|
def camelcase_underscore(name):
|
s1 = re.sub('<STR_LIT>', r'<STR_LIT>', name)<EOL>return re.sub('<STR_LIT>', r'<STR_LIT>', s1).lower()<EOL>
|
Convert camelCase names to snake_case
|
f7081:m4
|
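The two masked regular expressions are very likely the widely used camelCase-to-snake_case pair; reconstructed under that assumption:

```python
import re

def camelcase_underscore(name):
    """Convert a camelCase name to snake_case."""
    s1 = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s1).lower()

assert camelcase_underscore('cloudCoverPercentage') == 'cloud_cover_percentage'
assert camelcase_underscore('tileDataGeometry') == 'tile_data_geometry'
```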
def get_tiles_list(element):
|
tiles = {}<EOL>for el in element:<EOL><INDENT>g = (el.findall('<STR_LIT>') or el.findall('<STR_LIT>'))[<NUM_LIT:0>]<EOL>name = g.attrib['<STR_LIT>']<EOL>name_parts = name.split('<STR_LIT:_>')<EOL>mgs = name_parts[-<NUM_LIT:2>]<EOL>tiles[mgs] = name<EOL><DEDENT>return tiles<EOL>
|
Returns the list of all tile names from Product_Organisation element
in metadata.xml
|
f7081:m5
|
def metadata_to_dict(metadata):
|
tree = etree.parse(metadata)<EOL>root = tree.getroot()<EOL>meta = OrderedDict()<EOL>keys = [<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>]<EOL>for key in keys:<EOL><INDENT>try:<EOL><INDENT>meta[key.lower()] = root.findall('<STR_LIT>' + key)[<NUM_LIT:0>].text<EOL><DEDENT>except IndexError:<EOL><INDENT>meta[key.lower()] = None<EOL><DEDENT><DEDENT>meta['<STR_LIT>'] = float(meta.pop('<STR_LIT>'))<EOL>meta['<STR_LIT>'] = int(meta['<STR_LIT>'])<EOL>meta['<STR_LIT>'] = get_tiles_list(root.findall('<STR_LIT>')[<NUM_LIT:0>])<EOL>if root.findall('<STR_LIT>'):<EOL><INDENT>bands = root.findall('<STR_LIT>')[<NUM_LIT:0>]<EOL>meta['<STR_LIT>'] = []<EOL>for b in bands:<EOL><INDENT>band = b.text.replace('<STR_LIT:B>', '<STR_LIT>')<EOL>if len(band) == <NUM_LIT:1>:<EOL><INDENT>band = '<STR_LIT:B>' + pad(band, <NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>band = b.text<EOL><DEDENT>meta['<STR_LIT>'].append(band)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>bands = root.findall('<STR_LIT>')[<NUM_LIT:0>]<EOL>meta['<STR_LIT>'] = []<EOL>for b in bands:<EOL><INDENT>band = b.attrib['<STR_LIT>'].replace('<STR_LIT:B>', '<STR_LIT>')<EOL>if len(band) == <NUM_LIT:1>:<EOL><INDENT>band = '<STR_LIT:B>' + pad(band, <NUM_LIT:2>)<EOL><DEDENT>else:<EOL><INDENT>band = b.attrib['<STR_LIT>']<EOL><DEDENT>meta['<STR_LIT>'].append(band)<EOL><DEDENT><DEDENT>return meta<EOL>
|
Looks at the metadata.xml file of a Sentinel product, extracts useful keys,
and returns a Python dict
|
f7081:m6
|
def get_tile_geometry(path, origin_espg, tolerance=<NUM_LIT>):
|
with rasterio.open(path) as src:<EOL><INDENT>b = src.bounds<EOL>tile_shape = Polygon([(b[<NUM_LIT:0>], b[<NUM_LIT:1>]), (b[<NUM_LIT:2>], b[<NUM_LIT:1>]), (b[<NUM_LIT:2>], b[<NUM_LIT:3>]), (b[<NUM_LIT:0>], b[<NUM_LIT:3>]), (b[<NUM_LIT:0>], b[<NUM_LIT:1>])])<EOL>tile_geojson = mapping(tile_shape)<EOL>image = src.read(<NUM_LIT:1>)<EOL>mask = image == <NUM_LIT:0.><EOL>novalue_shape = shapes(image, mask=mask, transform=src.affine)<EOL>novalue_shape = [Polygon(s['<STR_LIT>'][<NUM_LIT:0>]) for (s, v) in novalue_shape]<EOL>if novalue_shape:<EOL><INDENT>union = cascaded_union(novalue_shape)<EOL>data_shape = tile_shape.difference(union)<EOL>if data_shape.geom_type == '<STR_LIT>':<EOL><INDENT>areas = {p.area: i for i, p in enumerate(data_shape)}<EOL>largest = max(areas.keys())<EOL>data_shape = data_shape[areas[largest]]<EOL><DEDENT>if list(data_shape.interiors):<EOL><INDENT>data_shape = Polygon(data_shape.exterior.coords)<EOL><DEDENT>data_shape = data_shape.simplify(tolerance, preserve_topology=False)<EOL>data_geojson = mapping(data_shape)<EOL><DEDENT>else:<EOL><INDENT>data_geojson = tile_geojson<EOL><DEDENT>return (to_latlon(tile_geojson, origin_espg), to_latlon(data_geojson, origin_espg))<EOL><DEDENT>
|
Calculate the data and tile geometry for Sentinel-2 tiles
|
f7081:m7
|
def tile_metadata(tile, product, geometry_check=None):
|
grid = '<STR_LIT>'.format(pad(tile['<STR_LIT>'], <NUM_LIT:2>), tile['<STR_LIT>'], tile['<STR_LIT>'])<EOL>meta = OrderedDict({<EOL>'<STR_LIT>': product['<STR_LIT>'][grid]<EOL>})<EOL>logger.info('<STR_LIT>' % (threading.current_thread().name, tile['<STR_LIT:path>']))<EOL>meta['<STR_LIT:date>'] = tile['<STR_LIT>'].split('<STR_LIT:T>')[<NUM_LIT:0>]<EOL>meta['<STR_LIT>'] = '<STR_LIT>'.format(tile['<STR_LIT:path>'], s3_url)<EOL>product.pop('<STR_LIT>')<EOL>tile.pop('<STR_LIT>')<EOL>bands = product.pop('<STR_LIT>')<EOL>for k, v in iteritems(tile):<EOL><INDENT>meta[camelcase_underscore(k)] = v<EOL><DEDENT>meta.update(product)<EOL>links = ['<STR_LIT>'.format(meta['<STR_LIT:path>'], b, s3_url) for b in bands]<EOL>meta['<STR_LIT>'] = {<EOL>'<STR_LIT>': links<EOL>}<EOL>meta['<STR_LIT>'] = '<STR_LIT>'.format(s3_url, meta['<STR_LIT:path>'])<EOL>def internal_latlon(meta):<EOL><INDENT>keys = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']<EOL>for key in keys:<EOL><INDENT>if key in meta:<EOL><INDENT>meta[key] = to_latlon(meta[key])<EOL><DEDENT><DEDENT>return meta<EOL><DEDENT>if geometry_check:<EOL><INDENT>if geometry_check(meta):<EOL><INDENT>meta = get_tile_geometry_from_s3(meta)<EOL><DEDENT>else:<EOL><INDENT>meta = internal_latlon(meta)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>meta = internal_latlon(meta)<EOL><DEDENT>meta['<STR_LIT>'] = meta.pop('<STR_LIT:path>')<EOL>return meta<EOL>
|
Generate metadata for a given tile
- geometry_check is a function that determines whether to calculate the geometry by downloading
B01 and overriding the provided geometry in tilejson. The meta object is passed to this function.
The function returns True or False.
|
f7081:m9
|
def __init__(self,<EOL>session: sa_orm.Session,<EOL>models: Union[List[type], Dict[str, type]],<EOL>date_factory: Optional[FunctionType] = None,<EOL>datetime_factory: Optional[FunctionType] = None):
|
super().__init__()<EOL>self.session = session<EOL>self.models = (models if isinstance(models, dict)<EOL>else {model.__name__: model for model in models})<EOL>self.model_instances = {}<EOL>self.datetime_factory = datetime_factory or utils.datetime_factory<EOL>self.date_factory = date_factory or utils.date_factory<EOL>
|
:param session: the sqlalchemy session
:param models: list of model classes, or dictionary of models by name
:param date_factory: function used to generate dates (takes one
parameter, the text value to convert)
:param datetime_factory: function used to generate datetimes (takes one
parameter, the text value to convert)
|
f7086:c0:m0
|
def create_or_update(self,<EOL>identifier: Identifier,<EOL>data: Dict[str, Any],<EOL>) -> Tuple[object, bool]:
|
raise NotImplementedError<EOL>
|
Create or update a model.
:param identifier: An object with :attr:`class_name` and :attr:`key`
attributes
:param data: A dictionary keyed by column name, with values being the
converted values to set on the model instance
:return: A two-tuple of model instance and whether or not it was created.
|
f7088:c0:m1
|
def get_relationships(self, class_name: str) -> Set[str]:
|
raise NotImplementedError<EOL>
|
Return the set of model attribute names that could have relationships for
the given model class name.
:param class_name: The name of the class name to discover relationships for.
:return: A set of model attribute names.
|
f7088:c0:m2
|
def maybe_convert_values(self,<EOL>identifier: Identifier,<EOL>data: Dict[str, Any],<EOL>) -> Dict[str, Any]:
|
raise NotImplementedError<EOL>
|
Takes a dictionary of raw values for a specific identifier, as parsed
from the YAML file, and depending upon the type of db column the data
is meant for, decides what to do with the value (e.g. leave it alone,
convert a string to a date/time instance, or convert identifiers to
model instances by calling :meth:`self.loader.convert_identifiers`)
:param identifier: An object with :attr:`class_name` and :attr:`key`
attributes
:param data: A dictionary keyed by column name, with values being the
raw values as parsed from the YAML
:return: A dictionary keyed by column name, with values being the
converted values meant to be set on the model instance
|
f7088:c0:m3
|
def commit(self):
|
pass<EOL>
|
If your ORM implements the data mapper pattern instead of active
record, then you can implement this method to commit the session after
all the models have been added to it.
|
f7088:c0:m4
|
def create_all(self, progress_callback: Optional[callable] = None) -> Dict[str, object]:
|
if not self._loaded:<EOL><INDENT>self._load_data()<EOL><DEDENT>dag = nx.DiGraph()<EOL>for model_class_name, dependencies in self.relationships.items():<EOL><INDENT>dag.add_node(model_class_name)<EOL>for dep in dependencies:<EOL><INDENT>dag.add_edge(model_class_name, dep)<EOL><DEDENT><DEDENT>try:<EOL><INDENT>creation_order = reversed(list(nx.topological_sort(dag)))<EOL><DEDENT>except nx.NetworkXUnfeasible:<EOL><INDENT>raise Exception('<STR_LIT>'<EOL>'<STR_LIT:U+002CU+0020>'.join(['<STR_LIT>'.format(a=a, b=b)<EOL>for a, b in nx.find_cycle(dag)]))<EOL><DEDENT>rv = {}<EOL>for model_class_name in creation_order:<EOL><INDENT>for identifier_key, data in self.model_fixtures[model_class_name].items():<EOL><INDENT>identifier = Identifier(model_class_name, identifier_key)<EOL>data = self.factory.maybe_convert_values(identifier, data)<EOL>self._cache[identifier_key] = data<EOL>model_instance, created = self.factory.create_or_update(identifier, data)<EOL>if progress_callback:<EOL><INDENT>progress_callback(identifier, model_instance, created)<EOL><DEDENT>rv[identifier_key] = model_instance<EOL><DEDENT><DEDENT>self.factory.commit()<EOL>return rv<EOL>
|
Creates all the models discovered from fixture files in :attr:`fixtures_dir`.
:param progress_callback: An optional function to track progress. It must take three
parameters:
- an :class:`Identifier`
- the model instance
- and a boolean specifying whether the model was created
:return: A dictionary keyed by identifier where the values are model instances.
|
f7099:c0:m1
|
def convert_identifiers(self, identifiers: Union[Identifier, List[Identifier]]):
|
if not identifiers:<EOL><INDENT>return identifiers<EOL><DEDENT>def _create_or_update(identifier):<EOL><INDENT>data = self._cache[identifier.key]<EOL>return self.factory.create_or_update(identifier, data)[<NUM_LIT:0>]<EOL><DEDENT>if isinstance(identifiers, Identifier):<EOL><INDENT>return _create_or_update(identifiers)<EOL><DEDENT>elif isinstance(identifiers, list) and isinstance(identifiers[<NUM_LIT:0>], Identifier):<EOL><INDENT>return [_create_or_update(identifier) for identifier in identifiers]<EOL><DEDENT>else:<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>
|
Convert an individual :class:`Identifier` to a model instance,
or a list of Identifiers to a list of model instances.
|
f7099:c0:m2
|
def _load_data(self):
|
filenames = []<EOL>model_identifiers = defaultdict(list)<EOL>for fixtures_dir in self.fixture_dirs:<EOL><INDENT>for filename in os.listdir(fixtures_dir):<EOL><INDENT>path = os.path.join(fixtures_dir, filename)<EOL>file_ext = filename[filename.find('<STR_LIT:.>')+<NUM_LIT:1>:]<EOL>if os.path.isfile(path) and file_ext in {'<STR_LIT>', '<STR_LIT>'}:<EOL><INDENT>filenames.append(filename)<EOL>with open(path) as f:<EOL><INDENT>self._cache[filename] = f.read()<EOL><DEDENT>with self._preloading_env() as env:<EOL><INDENT>rendered_yaml = env.get_template(filename).render()<EOL>data = yaml.load(rendered_yaml)<EOL>if data:<EOL><INDENT>class_name = filename[:filename.rfind('<STR_LIT:.>')]<EOL>model_identifiers[class_name] = list(data.keys())<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>for filename in filenames:<EOL><INDENT>self._load_from_yaml(filename, model_identifiers)<EOL><DEDENT>self._loaded = True<EOL>
|
Load all fixtures from :attr:`fixtures_dir`
|
f7099:c0:m3
|
def _load_from_yaml(self, filename: str, model_identifiers: Dict[str, List[str]]):
|
class_name = filename[:filename.rfind('<STR_LIT:.>')]<EOL>rendered_yaml = self.env.get_template(filename).render(<EOL>model_identifiers=model_identifiers)<EOL>fixture_data, self.relationships[class_name] = self._post_process_yaml_data(<EOL>yaml.load(rendered_yaml),<EOL>self.factory.get_relationships(class_name))<EOL>for identifier_key, data in fixture_data.items():<EOL><INDENT>self.model_fixtures[class_name][identifier_key] = data<EOL><DEDENT>
|
Load fixtures from the given filename
|
f7099:c0:m4
|
def _post_process_yaml_data(self,<EOL>fixture_data: Dict[str, Dict[str, Any]],<EOL>relationship_columns: Set[str],<EOL>) -> Tuple[Dict[str, Dict[str, Any]], List[str]]:
|
rv = {}<EOL>relationships = set()<EOL>if not fixture_data:<EOL><INDENT>return rv, relationships<EOL><DEDENT>for identifier_id, data in fixture_data.items():<EOL><INDENT>new_data = {}<EOL>for col_name, value in data.items():<EOL><INDENT>if col_name not in relationship_columns:<EOL><INDENT>new_data[col_name] = value<EOL>continue<EOL><DEDENT>identifiers = normalize_identifiers(value)<EOL>if identifiers:<EOL><INDENT>relationships.add(identifiers[<NUM_LIT:0>].class_name)<EOL><DEDENT>if isinstance(value, str) and len(identifiers) <= <NUM_LIT:1>:<EOL><INDENT>new_data[col_name] = identifiers[<NUM_LIT:0>] if identifiers else None<EOL><DEDENT>else:<EOL><INDENT>new_data[col_name] = identifiers<EOL><DEDENT><DEDENT>rv[identifier_id] = new_data<EOL><DEDENT>return rv, list(relationships)<EOL>
|
Convert and normalize identifier strings to Identifiers, as well as determine
class relationships.
|
f7099:c0:m5
|
def _ensure_env(self, env: Union[jinja2.Environment, None]):
|
if not env:<EOL><INDENT>env = jinja2.Environment()<EOL><DEDENT>if not env.loader:<EOL><INDENT>env.loader = jinja2.FunctionLoader(lambda filename: self._cache[filename])<EOL><DEDENT>if '<STR_LIT>' not in env.globals:<EOL><INDENT>faker = Faker()<EOL>faker.seed(<NUM_LIT>)<EOL>env.globals['<STR_LIT>'] = faker<EOL><DEDENT>if '<STR_LIT>' not in env.globals:<EOL><INDENT>env.globals['<STR_LIT>'] = jinja2.contextfunction(random_model)<EOL><DEDENT>if '<STR_LIT>' not in env.globals:<EOL><INDENT>env.globals['<STR_LIT>'] = jinja2.contextfunction(random_models)<EOL><DEDENT>return env<EOL>
|
Make sure the jinja environment is minimally configured.
|
f7099:c0:m6
|
@contextlib.contextmanager<EOL><INDENT>def _preloading_env(self):<DEDENT>
|
ctx = self.env.globals<EOL>try:<EOL><INDENT>ctx['<STR_LIT>'] = lambda *a, **kw: None<EOL>ctx['<STR_LIT>'] = lambda *a, **kw: None<EOL>yield self.env<EOL><DEDENT>finally:<EOL><INDENT>ctx['<STR_LIT>'] = jinja2.contextfunction(random_model)<EOL>ctx['<STR_LIT>'] = jinja2.contextfunction(random_models)<EOL><DEDENT>
|
A "stripped" jinja environment.
|
f7099:c0:m7
|
def random_model(ctx, model_class_name):
|
model_identifiers = ctx['<STR_LIT>'][model_class_name]<EOL>if not model_identifiers:<EOL><INDENT>return '<STR_LIT:None>'<EOL><DEDENT>idx = random.randrange(<NUM_LIT:0>, len(model_identifiers))<EOL>return '<STR_LIT>' % (model_class_name, model_identifiers[idx])<EOL>
|
Get a random model identifier by class name. For example::
# db/fixtures/Category.yml
{% for i in range(0, 10) %}
category{{ i }}:
name: {{ faker.name() }}
{% endfor %}
# db/fixtures/Post.yml
a_blog_post:
category: {{ random_model('Category') }}
Will render to something like the following::
# db/fixtures/Post.yml (rendered)
a_blog_post:
category: "Category(category7)"
:param ctx: The context variables of the current template (passed automatically)
:param model_class_name: The class name of the model to get.
|
f7100:m2
|
def random_models(ctx, model_class_name, min_count=<NUM_LIT:0>, max_count=<NUM_LIT:3>):
|
model_identifiers = ctx['<STR_LIT>'][model_class_name]<EOL>num_models = random.randint(min_count, min(max_count, len(model_identifiers)))<EOL>if num_models == <NUM_LIT:0>:<EOL><INDENT>return '<STR_LIT>'<EOL><DEDENT>added = set()<EOL>while len(added) < num_models:<EOL><INDENT>idx = random.randrange(<NUM_LIT:0>, len(model_identifiers))<EOL>added.add(model_identifiers[idx])<EOL><DEDENT>return '<STR_LIT>' % (model_class_name, '<STR_LIT:U+002C>'.join(added))<EOL>
|
Get multiple random model identifiers by class name. Example usage::
# db/fixtures/Tag.yml
{% for i in range(0, 10) %}
tag{{ i }}:
name: {{ faker.name() }}
{% endfor %}
# db/fixtures/Post.yml
a_blog_post:
tags: {{ random_models('Tag') }}
Will render to something like the following::
# db/fixtures/Post.yml (rendered)
a_blog_post:
tags: ["Tag(tag2, tag5)"]
:param ctx: The context variables of the current template (passed automatically)
:param model_class_name: The class name of the models to get.
:param min_count: The minimum number of models to return.
:param max_count: The maximum number of models to return.
|
f7100:m3
|
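random_models draws without replacement via a retry loop over a set; random.sample expresses the same selection directly. A hedged equivalent of just the selection step (the output formatting of the real helper is masked):

```python
import random

def pick_unique(identifiers, min_count=0, max_count=3):
    """Pick a duplicate-free random subset, like random_models' while-loop."""
    num = random.randint(min_count, min(max_count, len(identifiers)))
    return random.sample(identifiers, num)

random.seed(1234)
tags = ['tag%d' % i for i in range(10)]
subset = pick_unique(tags)
assert len(subset) == len(set(subset)) <= 3  # unique, bounded by max_count
```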
@task<EOL>def install_gitflow():
|
if not run('<STR_LIT>', hide=True, warn=True).ok:<EOL><INDENT>run('<STR_LIT>')<EOL>run('<STR_LIT>')<EOL>run('<STR_LIT>')<EOL><DEDENT>
|
Install git-flow if not found
|
f7104:m0
|
@task<EOL>def next_release(major=False, minor=False, patch=True):
|
import semantic_version<EOL>prev = run('<STR_LIT>', warn=True, hide=True).stdout or '<STR_LIT>'<EOL>ver = semantic_version.Version.coerce(prev.strip())<EOL>if major:<EOL><INDENT>return ver.next_major()<EOL><DEDENT>if minor:<EOL><INDENT>return ver.next_minor()<EOL><DEDENT>if patch:<EOL><INDENT>return ver.next_patch()<EOL><DEDENT>return None<EOL>
|
Get next release version (by major, minor or patch)
|
f7104:m1
|
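The task leans on the semantic_version package: coerce the previous tag into a Version, then bump one component. A runnable sketch of that logic (the git invocation that produces `prev` is masked in the row):

```python
import semantic_version  # pip install semantic-version

def bump(prev, major=False, minor=False, patch=True):
    """Return the next release version derived from `prev`."""
    ver = semantic_version.Version.coerce(prev.strip())
    if major:
        return ver.next_major()
    if minor:
        return ver.next_minor()
    if patch:
        return ver.next_patch()
    return None

assert str(bump('0.2.1')) == '0.2.2'
assert str(bump('0.2.1', minor=True)) == '0.3.0'
```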
@task(install_gitflow)<EOL>def start_rel_branch(relver):
|
print('<STR_LIT>', relver)<EOL>run('<STR_LIT>'.format(relver), hide=True)<EOL>
|
Start release branch
|
f7104:m2
|
@task(install_gitflow)<EOL>def finish_rel_branch(relver):
|
print('<STR_LIT>', relver)<EOL>run('<STR_LIT>'.format(ver=relver), hide=True)<EOL>
|
Finish release branch
|
f7104:m3
|
@task<EOL>def package():
|
print('<STR_LIT>')<EOL>run('<STR_LIT>', hide=True)<EOL>
|
Package application for release
|
f7104:m4
|
def _iter_changelog(changelog):
|
first_line = True<EOL>current_release = None<EOL>prev_msg = None<EOL>yield current_release, '<STR_LIT>'<EOL>for hash, tags, msg in changelog:<EOL><INDENT>if prev_msg is None:<EOL><INDENT>prev_msg = msg<EOL><DEDENT>else:<EOL><INDENT>if prev_msg.lower() == msg.lower():<EOL><INDENT>continue<EOL><DEDENT>else:<EOL><INDENT>prev_msg = msg<EOL><DEDENT><DEDENT>if tags:<EOL><INDENT>current_release = max(tags, key=pkg_resources.parse_version)<EOL>underline = len(current_release) * '<STR_LIT:->'<EOL>if not first_line:<EOL><INDENT>yield current_release, '<STR_LIT:\n>'<EOL><DEDENT>yield current_release, (<EOL>'<STR_LIT>' %<EOL>dict(tag=current_release, underline=underline))<EOL><DEDENT>if not msg.startswith('<STR_LIT>'):<EOL><INDENT>if msg.endswith('<STR_LIT:.>'):<EOL><INDENT>msg = msg[:-<NUM_LIT:1>]<EOL><DEDENT>yield current_release, '<STR_LIT>' % dict(msg=msg)<EOL><DEDENT>first_line = False<EOL><DEDENT>
|
Convert a --oneline log iterator to formatted strings.
:param changelog: An iterator of one-line log entries like
that given by _iter_log_oneline.
:return: An iterator over (release, formatted changelog) tuples.
|
f7104:m5
|
def _iter_log_inner(debug):
|
if debug:<EOL><INDENT>print('<STR_LIT>')<EOL><DEDENT>changelog = run('<STR_LIT>', hide=True).stdout.strip().decode('<STR_LIT:utf-8>', '<STR_LIT:replace>')<EOL>for line in changelog.split('<STR_LIT:\n>'):<EOL><INDENT>line_parts = line.split()<EOL>if len(line_parts) < <NUM_LIT:2>:<EOL><INDENT>continue<EOL><DEDENT>if line_parts[<NUM_LIT:1>].startswith('<STR_LIT:(>') and '<STR_LIT:)>' in line:<EOL><INDENT>msg = line.split('<STR_LIT:)>')[<NUM_LIT:1>].strip()<EOL><DEDENT>else:<EOL><INDENT>msg = '<STR_LIT:U+0020>'.join(line_parts[<NUM_LIT:1>:])<EOL><DEDENT>if '<STR_LIT>' in line:<EOL><INDENT>tags = set([<EOL>tag.split('<STR_LIT:U+002C>')[<NUM_LIT:0>]<EOL>for tag in line.split('<STR_LIT:)>')[<NUM_LIT:0>].split('<STR_LIT>')[<NUM_LIT:1>:]])<EOL><DEDENT>else:<EOL><INDENT>tags = set()<EOL><DEDENT>yield line_parts[<NUM_LIT:0>], tags, msg<EOL><DEDENT>
|
Iterate over --oneline log entries.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
:return: An iterator of (hash, tags_set, 1st_line) tuples.
|
f7104:m6
|
def _iter_log_oneline(debug):
|
return _iter_log_inner(debug)<EOL>
|
Iterate over --oneline log entries if possible.
This parses the output into a structured form but does not apply
presentation logic to the output - making it suitable for different
uses.
|
f7104:m7
|
@task<EOL>def write_changelog(debug=False):
|
changelog = _iter_log_oneline(debug)<EOL>if changelog:<EOL><INDENT>changelog = _iter_changelog(changelog)<EOL><DEDENT>if not changelog:<EOL><INDENT>return<EOL><DEDENT>if debug:<EOL><INDENT>print('<STR_LIT>')<EOL><DEDENT>new_changelog = os.path.join(os.path.curdir, '<STR_LIT>')<EOL>if (os.path.exists(new_changelog)<EOL>and not os.access(new_changelog, os.W_OK)):<EOL><INDENT>return<EOL><DEDENT>with io.open(new_changelog, '<STR_LIT:w>', encoding='<STR_LIT:utf-8>') as changelog_file:<EOL><INDENT>for release, content in changelog:<EOL><INDENT>changelog_file.write(content)<EOL><DEDENT><DEDENT>
|
Write a changelog based on the git changelog.
|
f7104:m8
|
@task<EOL>def prepare_release(ver=None):
|
write_changelog(True)<EOL>if ver is None:<EOL><INDENT>ver = next_release()<EOL><DEDENT>print('<STR_LIT>')<EOL>run('<STR_LIT>'.format(ver), hide=True)<EOL>sha = run('<STR_LIT>', hide=True).stdout<EOL>run('<STR_LIT>'.format(ver=ver, sha=sha), hide=True)<EOL>package()<EOL>write_changelog()<EOL>run('<STR_LIT>'.format(ver), hide=True)<EOL>run('<STR_LIT>', hide=True)<EOL>
|
Prepare release artifacts
|
f7104:m9
|
@task<EOL>def publish(idx=None):
|
if idx is None:<EOL><INDENT>idx = '<STR_LIT>'<EOL><DEDENT>else:<EOL><INDENT>idx = '<STR_LIT>' + idx<EOL><DEDENT>run('<STR_LIT>'.format(idx))<EOL>run('<STR_LIT>'.format(idx))<EOL>
|
Publish packaged distributions to a PyPI index
|
f7104:m10
|
@task<EOL>def release(major=False, minor=False, patch=True, pypi_index=None):
|
relver = next_release(major, minor, patch)<EOL>start_rel_branch(relver)<EOL>prepare_release(relver)<EOL>finish_rel_branch(relver)<EOL>publish(pypi_index)<EOL>
|
Overall process flow for performing a release
|
f7104:m11
|
@task<EOL>def clean(all=False, docs=False, dist=False, extra=None):
|
run('<STR_LIT>')<EOL>run('<STR_LIT>')<EOL>patterns = ['<STR_LIT>', '<STR_LIT>']<EOL>if all or docs:<EOL><INDENT>patterns.append('<STR_LIT>')<EOL><DEDENT>if all or dist:<EOL><INDENT>patterns.append('<STR_LIT>')<EOL><DEDENT>if extra:<EOL><INDENT>patterns.append(extra)<EOL><DEDENT>for pattern in patterns:<EOL><INDENT>run('<STR_LIT>'.format(pattern))<EOL><DEDENT>
|
Clean up build files
|
f7104:m12
|
def _make_opt_list(opts, group):
|
import copy<EOL>import itertools<EOL>_opts = [(group, list(itertools.chain(*opts)))]<EOL>return [(g, copy.deepcopy(o)) for g, o in _opts]<EOL>
|
Generate a list of tuples containing (group, options)
:param opts: option lists associated with a group
:type opts: list
:param group: name of an option group
:type group: str
:return: a list of (group_name, opts) tuples
:rtype: list
|
f7111:m0
|
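De-masked, the helper just flattens the grouped option lists and deep-copies them so callers cannot mutate the registered options; only the group label passed by list_opts is unknown, so an arbitrary one is used here:

```python
import copy
import itertools

def _make_opt_list(opts, group):
    """Return [(group, flattened_opts)] with the options deep-copied."""
    _opts = [(group, list(itertools.chain(*opts)))]
    return [(g, copy.deepcopy(o)) for g, o in _opts]

assert _make_opt_list([['a', 'b'], ['c']], 'tvdb') == [('tvdb', ['a', 'b', 'c'])]
```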
def list_opts():
|
return _make_opt_list([OPTS], '<STR_LIT>')<EOL>
|
Returns a list of oslo_config options available in the library.
The returned list includes all oslo_config options which may be registered
at runtime by the library.
Each element of the list is a tuple. The first element is the name of the
group under which the list of elements in the second element will be
registered. A group name of None corresponds to the [DEFAULT] group in
config files.
The purpose of this is to allow tools like the Oslo sample config file
generator to discover the options exposed to users by this library.
:returns: a list of (group_name, opts) tuples
|
f7111:m1
|
def utcnow():
|
return datetime.datetime.utcnow()<EOL>
|
Gets the current time.
:returns: the current UTC time
:rtype: :py:obj:`datetime.datetime`
|
f7112:m0
|
def is_older_than(before, delta):
|
return utcnow() - before > delta<EOL>
|
Checks if a datetime is older than delta
:param datetime before: a datetime to check
:param timedelta delta: period of time to compare against
:returns: ``True`` if before is older than the time period else ``False``
:rtype: bool
|
f7112:m1
|
def is_newer_than(after, delta):
|
return after - utcnow() > delta<EOL>
|
Checks if a datetime is newer than delta
:param datetime after: a datetime to check
:param timedelta delta: period of time to compare against
:returns: ``True`` if after is newer than the time period else ``False``
:rtype: bool
|
f7112:m2
|
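How the two comparisons behave around now, using the ONE_HOUR constant that token_expired (below) relies on; its value here is an assumption:

```python
import datetime

ONE_HOUR = datetime.timedelta(hours=1)  # assumed value of timeutil.ONE_HOUR

def utcnow():
    return datetime.datetime.utcnow()

def is_older_than(before, delta):
    return utcnow() - before > delta

def is_newer_than(after, delta):
    return after - utcnow() > delta

# A timestamp from two hours ago is older than one hour...
assert is_older_than(utcnow() - 2 * ONE_HOUR, ONE_HOUR)
# ...and a timestamp two hours in the future is newer than one hour.
assert is_newer_than(utcnow() + 2 * ONE_HOUR, ONE_HOUR)
```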
def get_client(config_file=None, apikey=None, username=None, userpass=None,<EOL>service_url=None, verify_ssl_certs=None, select_first=None):
|
from oslo_config import cfg<EOL>from tvdbapi_client import api<EOL>if config_file is not None:<EOL><INDENT>cfg.CONF([], default_config_files=[config_file])<EOL><DEDENT>else:<EOL><INDENT>if apikey is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT>', apikey, '<STR_LIT>')<EOL><DEDENT>if username is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT:username>', username, '<STR_LIT>')<EOL><DEDENT>if userpass is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT>', userpass, '<STR_LIT>')<EOL><DEDENT>if service_url is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT>', service_url, '<STR_LIT>')<EOL><DEDENT>if verify_ssl_certs is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT>', verify_ssl_certs, '<STR_LIT>')<EOL><DEDENT>if select_first is not None:<EOL><INDENT>cfg.CONF.set_override('<STR_LIT>', select_first, '<STR_LIT>')<EOL><DEDENT><DEDENT>return api.TVDBClient()<EOL>
|
Configures the API service and creates a new client instance.
:param str config_file: absolute path to configuration file
:param str apikey: apikey from thetvdb
:param str username: username used on thetvdb
:param str userpass: password used on thetvdb
:param str service_url: the url for thetvdb api service
:param bool verify_ssl_certs: flag for validating ssl certs for
service url (https)
:param bool select_first: flag for selecting first series from
search results
:returns: tvdbapi client
:rtype: tvdbapi_client.api.TVDBClient
|
f7113:m0
|
def requires_auth(func):
|
@six.wraps(func)<EOL>def wrapper(self, *args, **kwargs):<EOL><INDENT>if self.token_expired:<EOL><INDENT>self.authenticate()<EOL><DEDENT>return func(self, *args, **kwargs)<EOL><DEDENT>return wrapper<EOL>
|
Handle authentication checks.
.. py:decorator:: requires_auth
Checks if the token has expired and performs authentication if needed.
|
f7114:m0
|
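A self-contained sketch of the decorator, with a hypothetical FakeClient standing in for the real API client; the row uses six.wraps, for which functools.wraps is the Python 3 equivalent:

```python
import functools

def requires_auth(func):
    """Authenticate first if the client's token has expired."""
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        if self.token_expired:
            self.authenticate()
        return func(self, *args, **kwargs)
    return wrapper

class FakeClient:  # hypothetical stand-in for the real API client
    token_expired = True
    def authenticate(self):
        self.token_expired = False
    @requires_auth
    def get_series(self, series_id):
        return {'id': series_id}

client = FakeClient()
assert client.get_series(42) == {'id': 42}
assert client.token_expired is False  # the decorator triggered authenticate()
```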
def __init__(self, apikey=None, username=None, userpass=None):
|
self.__apikey = apikey or cfg.CONF.tvdb.apikey<EOL>self.__username = username or cfg.CONF.tvdb.username<EOL>self.__userpass = userpass or cfg.CONF.tvdb.userpass<EOL>self.__token = None<EOL>self._token_timer = None<EOL>self._session = None<EOL>self._headers = DEFAULT_HEADERS<EOL>self._language = '<STR_LIT>'<EOL>
|
Create new instance of API client.
:param str apikey: apikey from thetvdb
:param str username: username used on thetvdb
:param str userpass: password used on thetvdb
|
f7114:c0:m0
|
@property<EOL><INDENT>def headers(self):<DEDENT>
|
self._headers.update(**{'<STR_LIT>': self.language})<EOL>if self.__token:<EOL><INDENT>self._headers.update(<EOL>**{'<STR_LIT>': '<STR_LIT>' % self.__token})<EOL><DEDENT>return self._headers<EOL>
|
Provide access to updated headers.
|
f7114:c0:m1
|
@property<EOL><INDENT>def language(self):<DEDENT>
|
return self._language<EOL>
|
Provide access to current language.
|
f7114:c0:m2
|
@language.setter<EOL><INDENT>def language(self, abbr):<DEDENT>
|
self._language = abbr<EOL>
|
Provide the ability to update the language.
|
f7114:c0:m3
|
@property<EOL><INDENT>def token_expired(self):<DEDENT>
|
if self._token_timer is None:<EOL><INDENT>return True<EOL><DEDENT>return timeutil.is_newer_than(self._token_timer, timeutil.ONE_HOUR)<EOL>
|
Provide access to flag indicating if token has expired.
|
f7114:c0:m4
|
@property<EOL><INDENT>def session(self):<DEDENT>
|
if self._session is None:<EOL><INDENT>self._session = cachecontrol.CacheControl(<EOL>requests.Session(),<EOL>cache=caches.FileCache('<STR_LIT>'))<EOL><DEDENT>return self._session<EOL>
|
Provide access to request session with local cache enabled.
|
f7114:c0:m5
|
@exceptions.error_map<EOL><INDENT>def _exec_request(self, service, method=None, path_args=None, data=None,<EOL>params=None):<DEDENT>
|
if path_args is None:<EOL><INDENT>path_args = []<EOL><DEDENT>req = {<EOL>'<STR_LIT>': method or '<STR_LIT>',<EOL>'<STR_LIT:url>': '<STR_LIT:/>'.join(str(a).strip('<STR_LIT:/>') for a in [<EOL>cfg.CONF.tvdb.service_url, service] + path_args),<EOL>'<STR_LIT:data>': json.dumps(data) if data else None,<EOL>'<STR_LIT>': self.headers,<EOL>'<STR_LIT>': params,<EOL>'<STR_LIT>': cfg.CONF.tvdb.verify_ssl_certs,<EOL>}<EOL>LOG.debug('<STR_LIT>', req['<STR_LIT>'], req['<STR_LIT:url>'])<EOL>resp = self.session.request(**req)<EOL>resp.raise_for_status()<EOL>return resp.json() if resp.text else resp.text<EOL>
|
Execute request.
|
f7114:c0:m6
|
def authenticate(self):
|
if self.__token:<EOL><INDENT>try:<EOL><INDENT>resp = self._refresh_token()<EOL><DEDENT>except exceptions.TVDBRequestException as err:<EOL><INDENT>if getattr(err.response, '<STR_LIT>', <NUM_LIT:0>) == <NUM_LIT>:<EOL><INDENT>resp = self._login()<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>resp = self._login()<EOL><DEDENT>self.__token = resp.get('<STR_LIT>')<EOL>self._token_timer = timeutil.utcnow()<EOL>
|
Acquire an authorization token for using the thetvdb APIs.
|
f7114:c0:m9
|
@requires_auth<EOL><INDENT>def search_series(self, **kwargs):<DEDENT>
|
params = {}<EOL>for arg, val in six.iteritems(kwargs):<EOL><INDENT>if arg in SERIES_BY:<EOL><INDENT>params[arg] = val<EOL><DEDENT><DEDENT>resp = self._exec_request(<EOL>'<STR_LIT>', path_args=['<STR_LIT>'], params=params)<EOL>if cfg.CONF.tvdb.select_first:<EOL><INDENT>return resp['<STR_LIT:data>'][<NUM_LIT:0>]<EOL><DEDENT>return resp['<STR_LIT:data>']<EOL>
|
Provide the ability to search for a series.
.. warning::
authorization token required
The following search arguments currently supported:
* name
* imdbId
* zap2itId
:param kwargs: keyword arguments to search for series
:returns: series record or series records
:rtype: dict
|
f7114:c0:m10
|
@requires_auth<EOL><INDENT>def get_series(self, series_id):<DEDENT>
|
return self._exec_request('<STR_LIT>', path_args=[series_id])['<STR_LIT:data>']<EOL>
|
Retrieve series record.
.. warning::
authorization token required
:param str series_id: id of series as found on thetvdb
:returns: series record
:rtype: dict
|
f7114:c0:m11
|
@requires_auth<EOL><INDENT>def get_episodes(self, series_id, **kwargs):<DEDENT>
|
params = {'<STR_LIT>': <NUM_LIT:1>}<EOL>for arg, val in six.iteritems(kwargs):<EOL><INDENT>if arg in EPISODES_BY:<EOL><INDENT>params[arg] = val<EOL><DEDENT><DEDENT>return self._exec_request(<EOL>'<STR_LIT>',<EOL>path_args=[series_id, '<STR_LIT>', '<STR_LIT>'], params=params)['<STR_LIT:data>']<EOL>
|
All episodes for a given series.
Paginated with 100 results per page.
.. warning::
authorization token required
The following search arguments currently supported:
* airedSeason
* airedEpisode
* imdbId
* dvdSeason
* dvdEpisode
* absoluteNumber
* page
:param str series_id: id of series as found on thetvdb
:param kwargs: keyword args to search/filter episodes by (optional)
:returns: series episode records
:rtype: list
|
f7114:c0:m12
|
@requires_auth<EOL><INDENT>def get_episodes_summary(self, series_id):<DEDENT>
|
return self._exec_request(<EOL>'<STR_LIT>', path_args=[series_id, '<STR_LIT>', '<STR_LIT>'])['<STR_LIT:data>']<EOL>
|
Return a summary of the episodes and seasons for the series.
.. warning::
authorization token required
.. note::
Season "0" is for all episodes that are considered to be specials.
:param str series_id: id of series as found on thetvdb
:returns: summary of the episodes and seasons for the series
:rtype: dict
|
f7114:c0:m13
|
@requires_auth<EOL><INDENT>def get_series_image_info(self, series_id):<DEDENT>
|
return self._exec_request(<EOL>'<STR_LIT>', path_args=[series_id, '<STR_LIT>'])['<STR_LIT:data>']<EOL>
|
Return a summary of the images for a particular series.
.. warning::
authorization token required
:param str series_id: id of series as found on thetvdb
:returns: summary of the images for the series
:rtype: dict
|
f7114:c0:m14
|
@requires_auth<EOL><INDENT>def get_episode(self, episode_id):<DEDENT>
|
return self._exec_request('<STR_LIT>', path_args=[episode_id])['<STR_LIT:data>']<EOL>
|
Return the full information for a given episode id.
.. warning::
authorization token required
:param str episode_id: id of episode as found on thetvdb
:returns: episode record
:rtype: dict
|
f7114:c0:m15
|
def error_map(func):
|
@six.wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>try:<EOL><INDENT>return func(*args, **kwargs)<EOL><DEDENT>except exceptions.RequestException as err:<EOL><INDENT>raise TVDBRequestException(<EOL>err,<EOL>response=getattr(err, '<STR_LIT>', None),<EOL>request=getattr(err, '<STR_LIT>', None))<EOL><DEDENT><DEDENT>return wrapper<EOL>
|
Wrap exceptions raised by requests.
.. py:decorator:: error_map
|
f7115:m0
|
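A sketch of the decorator against the real requests exception hierarchy (RequestException instances do carry .response and .request attributes); the TVDBRequestException definition here is an assumption about the library's own exception class, and functools.wraps replaces the row's six.wraps:

```python
import functools
import requests

class TVDBRequestException(Exception):  # assumed shape of the library exception
    def __init__(self, cause, response=None, request=None):
        super().__init__(cause)
        self.response = response
        self.request = request

def error_map(func):
    """Re-raise requests errors as TVDBRequestException, keeping the
    response/request handles that callers like authenticate() inspect."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except requests.exceptions.RequestException as err:
            raise TVDBRequestException(err,
                                       response=getattr(err, 'response', None),
                                       request=getattr(err, 'request', None))
    return wrapper
```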
def construct(self):
|
def _any(thing, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>ret = None<EOL>if isinstance(thing, QuerySet):<EOL><INDENT>ret = _qs(thing, fields)<EOL><DEDENT>elif isinstance(thing, (tuple, list, set)):<EOL><INDENT>ret = _list(thing, fields)<EOL><DEDENT>elif isinstance(thing, dict):<EOL><INDENT>ret = _dict(thing, fields)<EOL><DEDENT>elif isinstance(thing, decimal.Decimal):<EOL><INDENT>ret = str(thing)<EOL><DEDENT>elif isinstance(thing, Model):<EOL><INDENT>ret = _model(thing, fields)<EOL><DEDENT>elif isinstance(thing, HttpResponse):<EOL><INDENT>raise HttpStatusCode(thing)<EOL><DEDENT>elif inspect.isfunction(thing):<EOL><INDENT>if not inspect.getargspec(thing)[<NUM_LIT:0>]:<EOL><INDENT>ret = _any(thing())<EOL><DEDENT><DEDENT>elif hasattr(thing, '<STR_LIT>'):<EOL><INDENT>f = thing.__emittable__<EOL>if inspect.ismethod(f) and len(inspect.getargspec(f)[<NUM_LIT:0>]) == <NUM_LIT:1>:<EOL><INDENT>ret = _any(f())<EOL><DEDENT><DEDENT>elif repr(thing).startswith("<STR_LIT>"):<EOL><INDENT>ret = _any(thing.all())<EOL><DEDENT>else:<EOL><INDENT>ret = smart_unicode(thing, strings_only=True)<EOL><DEDENT>return ret<EOL><DEDENT>def _fk(data, field):<EOL><INDENT>"""<STR_LIT>"""<EOL>return _any(getattr(data, field.name))<EOL><DEDENT>def _related(data, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>return [ _model(m, fields) for m in data.iterator() ]<EOL><DEDENT>def _m2m(data, field, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]<EOL><DEDENT>def _model(data, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>ret = { }<EOL>handler = self.in_typemapper(type(data), self.anonymous)<EOL>get_absolute_uri = False<EOL>if handler or fields:<EOL><INDENT>v = lambda f: getattr(data, f.attname)<EOL>if handler:<EOL><INDENT>fields = getattr(handler, '<STR_LIT>')<EOL><DEDENT>if not fields or hasattr(handler, '<STR_LIT>'):<EOL><INDENT>"""<STR_LIT>"""<EOL>mapped = self.in_typemapper(type(data), self.anonymous)<EOL>get_fields = set(mapped.fields)<EOL>exclude_fields = set(mapped.exclude).difference(get_fields)<EOL>if '<STR_LIT>' in get_fields:<EOL><INDENT>get_absolute_uri = True<EOL><DEDENT>if not get_fields:<EOL><INDENT>get_fields = set([ f.attname.replace("<STR_LIT>", "<STR_LIT>", <NUM_LIT:1>)<EOL>for f in data._meta.fields + data._meta.virtual_fields])<EOL><DEDENT>if hasattr(mapped, '<STR_LIT>'):<EOL><INDENT>get_fields.update(mapped.extra_fields)<EOL><DEDENT>for exclude in exclude_fields:<EOL><INDENT>if isinstance(exclude, str):<EOL><INDENT>get_fields.discard(exclude)<EOL><DEDENT>elif isinstance(exclude, re._pattern_type):<EOL><INDENT>for field in get_fields.copy():<EOL><INDENT>if exclude.match(field):<EOL><INDENT>get_fields.discard(field)<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>get_fields = set(fields)<EOL><DEDENT>met_fields = self.method_fields(handler, get_fields)<EOL>for f in data._meta.local_fields + data._meta.virtual_fields:<EOL><INDENT>if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):<EOL><INDENT>if not f.rel:<EOL><INDENT>if f.attname in get_fields:<EOL><INDENT>ret[f.attname] = _any(v(f))<EOL>get_fields.remove(f.attname)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if f.attname[:-<NUM_LIT:3>] in get_fields:<EOL><INDENT>ret[f.name] = _fk(data, f)<EOL>get_fields.remove(f.name)<EOL><DEDENT><DEDENT><DEDENT><DEDENT>for mf in data._meta.many_to_many:<EOL><INDENT>if mf.serialize and mf.attname not in met_fields:<EOL><INDENT>if mf.attname in get_fields:<EOL><INDENT>ret[mf.name] = _m2m(data, mf)<EOL>get_fields.remove(mf.name)<EOL><DEDENT><DEDENT><DEDENT>for maybe_field in get_fields:<EOL><INDENT>if isinstance(maybe_field, (list, tuple)):<EOL><INDENT>model, fields = maybe_field<EOL>inst = getattr(data, model, None)<EOL>if inst:<EOL><INDENT>if hasattr(inst, '<STR_LIT:all>'):<EOL><INDENT>ret[model] = _related(inst, fields)<EOL><DEDENT>elif callable(inst):<EOL><INDENT>if len(inspect.getargspec(inst)[<NUM_LIT:0>]) == <NUM_LIT:1>:<EOL><INDENT>ret[model] = _any(inst(), fields)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>ret[model] = _model(inst, fields)<EOL><DEDENT><DEDENT><DEDENT>elif maybe_field in met_fields:<EOL><INDENT>ret[maybe_field] = _any(met_fields[maybe_field](data))<EOL><DEDENT>else:<EOL><INDENT>maybe = getattr(data, maybe_field, None)<EOL>if maybe is not None:<EOL><INDENT>if callable(maybe):<EOL><INDENT>if len(inspect.getargspec(maybe)[<NUM_LIT:0>]) <= <NUM_LIT:1>:<EOL><INDENT>ret[maybe_field] = _any(maybe())<EOL><DEDENT><DEDENT>else:<EOL><INDENT>ret[maybe_field] = _any(maybe)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>handler_f = getattr(handler or self.handler, maybe_field, None)<EOL>if handler_f:<EOL><INDENT>ret[maybe_field] = _any(handler_f(data))<EOL><DEDENT><DEDENT><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>for f in data._meta.fields:<EOL><INDENT>ret[f.attname] = _any(getattr(data, f.attname))<EOL><DEDENT>fields = dir(data.__class__) + list(ret.keys())<EOL>add_ons = [k for k in dir(data) if k not in fields]<EOL>for k in add_ons:<EOL><INDENT>ret[k] = _any(getattr(data, k))<EOL><DEDENT><DEDENT>if self.in_typemapper(type(data), self.anonymous):<EOL><INDENT>handler = self.in_typemapper(type(data), self.anonymous)<EOL>if hasattr(handler, '<STR_LIT>'):<EOL><INDENT>url_id, fields = handler.resource_uri(data)<EOL>try:<EOL><INDENT>ret['<STR_LIT>'] = permalink(lambda: (url_id, fields))()<EOL><DEDENT>except NoReverseMatch as e:<EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>if hasattr(data, '<STR_LIT>') and '<STR_LIT>' not in ret:<EOL><INDENT>try:<EOL><INDENT>ret['<STR_LIT>'] = data.get_api_url()<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>if hasattr(data, '<STR_LIT>') and get_absolute_uri:<EOL><INDENT>try:<EOL><INDENT>ret['<STR_LIT>'] = data.get_absolute_url()<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return ret<EOL><DEDENT>def _qs(data, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>return [_any(v, fields) for v in data ]<EOL><DEDENT>def _list(data, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>return [_any(v, fields) for v in data ]<EOL><DEDENT>def _dict(data, fields=None):<EOL><INDENT>"""<STR_LIT>"""<EOL>return dict([(k, _any(v, fields)) for k, v in data.items()])<EOL><DEDENT>return _any(self.data, self.fields)<EOL>
|
Recursively serializes many types; in cases where it doesn't recognize
the type, it falls back to Django's `smart_unicode`.
Returns a `dict`.
|
f7123:c0:m2
|
def render(self):
|
raise NotImplementedError("<STR_LIT>")<EOL>
|
This super emitter does not implement `render`;
that is a job for the specific emitters below.
|
f7123:c0:m4
|
def stream_render(self, request, stream=True):
|
yield self.render(request)<EOL>
|
Tells our patched middleware not to look
at the contents, and returns a generator
rather than the buffered string. Should be
more memory-friendly for large datasets.
|
f7123:c0:m5
|
@classmethod<EOL><INDENT>def get(cls, format):<DEDENT>
|
if format in cls.EMITTERS:<EOL><INDENT>return cls.EMITTERS.get(format)<EOL><DEDENT>raise ValueError("<STR_LIT>" % format)<EOL>
|
Gets an emitter, returns the class and a content-type.
|
f7123:c0:m6
|
@classmethod<EOL><INDENT>def register(cls, name, klass, content_type='<STR_LIT>'):<DEDENT>
|
cls.EMITTERS[name] = (klass, content_type)<EOL>
|
Register an emitter.
Parameters::
- `name`: The name of the emitter ('json', 'xml', 'yaml', ...)
- `klass`: The emitter class.
- `content_type`: The content type to serve response as.
|
f7123:c0:m7
|
@classmethod<EOL><INDENT>def unregister(cls, name):<DEDENT>
|
return cls.EMITTERS.pop(name, None)<EOL>
|
Remove an emitter from the registry. Useful if you don't
want to provide output in one of the built-in emitters.
|
f7123:c0:m8
|
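The three classmethods above form a simple name -> (class, content_type) registry. Consolidated into one runnable sketch; JSONEmitter, the content types, and the error message are illustrative assumptions:

```python
class Emitter:
    EMITTERS = {}

    @classmethod
    def get(cls, format):
        if format in cls.EMITTERS:
            return cls.EMITTERS.get(format)
        raise ValueError("No emitter registered for %s" % format)

    @classmethod
    def register(cls, name, klass, content_type='text/plain'):
        cls.EMITTERS[name] = (klass, content_type)

    @classmethod
    def unregister(cls, name):
        return cls.EMITTERS.pop(name, None)

class JSONEmitter:  # hypothetical emitter class
    pass

Emitter.register('json', JSONEmitter, 'application/json')
assert Emitter.get('json') == (JSONEmitter, 'application/json')
Emitter.unregister('json')
```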
def determine_emitter(self, request, *args, **kwargs):
|
em = kwargs.pop('<STR_LIT>', None)<EOL>if not em:<EOL><INDENT>em = request.GET.get('<STR_LIT>', '<STR_LIT>')<EOL><DEDENT>return em<EOL>
|
Function for determining which emitter to use
for output. It lives here so you can easily subclass
`Resource` in order to change how emission is detected.
You could also check for the `Accept` HTTP header here,
since that pretty much makes sense. Refer to `Mimer` for
that as well.
|
f7123:c3:m1
|
def form_validation_response(self, e):
|
resp = rc.BAD_REQUEST<EOL>resp.write('<STR_LIT:U+0020>'+str(e.form.errors))<EOL>return resp<EOL>
|
Method to return form validation error information.
You will probably want to override this in your own
`Resource` subclass.
|
f7123:c3:m2
|
@property<EOL><INDENT>def anonymous(self):<DEDENT>
|
if hasattr(self.handler, '<STR_LIT>'):<EOL><INDENT>anon = self.handler.anonymous<EOL>if callable(anon):<EOL><INDENT>return anon<EOL><DEDENT>for klass in list(typemapper.keys()):<EOL><INDENT>if anon == klass.__name__:<EOL><INDENT>return klass<EOL><DEDENT><DEDENT><DEDENT>return None<EOL>
|
Gets the anonymous handler. Also tries to grab a class
if the `anonymous` value is a string, so that we can define
anonymous handlers that aren't defined yet (as when you're
subclassing your base handler into an anonymous one).
|
f7123:c3:m3
|
@vary_on_headers('<STR_LIT>')<EOL><INDENT>def __call__(self, request, *args, **kwargs):<DEDENT>
|
rm = request.method.upper()<EOL>if rm == "<STR_LIT>":<EOL><INDENT>coerce_put_post(request)<EOL><DEDENT>actor, anonymous = self.authenticate(request, rm)<EOL>if anonymous is CHALLENGE:<EOL><INDENT>return actor()<EOL><DEDENT>else:<EOL><INDENT>handler = actor<EOL><DEDENT>if rm in ('<STR_LIT:POST>', '<STR_LIT>'):<EOL><INDENT>try:<EOL><INDENT>translate_mime(request)<EOL><DEDENT>except MimerDataException:<EOL><INDENT>return rc.BAD_REQUEST<EOL><DEDENT>if not hasattr(request, '<STR_LIT:data>'):<EOL><INDENT>if rm == '<STR_LIT:POST>':<EOL><INDENT>request.data = request.POST<EOL><DEDENT>else:<EOL><INDENT>request.data = request.PUT<EOL><DEDENT><DEDENT><DEDENT>if not rm in handler.allowed_methods:<EOL><INDENT>return HttpResponseNotAllowed(handler.allowed_methods)<EOL><DEDENT>meth = getattr(handler, self.callmap.get(rm, '<STR_LIT>'), None)<EOL>if not meth:<EOL><INDENT>raise Http404<EOL><DEDENT>em_format = self.determine_emitter(request, *args, **kwargs)<EOL>kwargs.pop('<STR_LIT>', None)<EOL>request = self.cleanup_request(request)<EOL>try:<EOL><INDENT>result = meth(request, *args, **kwargs)<EOL><DEDENT>except Exception as e:<EOL><INDENT>result = self.error_handler(e, request, meth, em_format)<EOL><DEDENT>try:<EOL><INDENT>emitter, ct = Emitter.get(em_format)<EOL>fields = handler.fields<EOL>if hasattr(handler, '<STR_LIT>') and isinstance(result, (list, tuple, QuerySet)):<EOL><INDENT>fields = handler.list_fields<EOL><DEDENT><DEDENT>except ValueError:<EOL><INDENT>result = rc.BAD_REQUEST<EOL>result.content = "<STR_LIT>" % em_format<EOL>return result<EOL><DEDENT>status_code = <NUM_LIT:200><EOL>if isinstance(result, HttpResponse) and not result._is_string:<EOL><INDENT>status_code = result.status_code<EOL>result = result._container<EOL><DEDENT>srl = emitter(result, typemapper, handler, fields, anonymous)<EOL>try:<EOL><INDENT>"""<STR_LIT>"""<EOL>if self.stream: stream = srl.stream_render(request)<EOL>else: stream = srl.render(request)<EOL>if not isinstance(stream, HttpResponse):<EOL><INDENT>resp = HttpResponse(stream, mimetype=ct, status=status_code)<EOL><DEDENT>else:<EOL><INDENT>resp = stream<EOL><DEDENT>resp.streaming = self.stream<EOL>return resp<EOL><DEDENT>except HttpStatusCode as e:<EOL><INDENT>return e.response<EOL><DEDENT>
|
NB: Sends a `Vary` header so we don't cache requests
that are different (OAuth stuff in `Authorization` header.)
|
f7123:c3:m5
|
@staticmethod<EOL><INDENT>def cleanup_request(request):<DEDENT>
|
for method_type in ('<STR_LIT:GET>', '<STR_LIT>', '<STR_LIT:POST>', '<STR_LIT>'):<EOL><INDENT>block = getattr(request, method_type, { })<EOL>if True in [ k.startswith("<STR_LIT>") for k in list(block.keys()) ]:<EOL><INDENT>sanitized = block.copy()<EOL>for k in list(sanitized.keys()):<EOL><INDENT>if k.startswith("<STR_LIT>"):<EOL><INDENT>sanitized.pop(k)<EOL><DEDENT><DEDENT>setattr(request, method_type, sanitized)<EOL><DEDENT><DEDENT>return request<EOL>
|
Removes `oauth_` keys from various dicts on the
request object, and returns the sanitized version.
|
f7123:c3:m6
|
def error_handler(self, e, request, meth, em_format):
|
if isinstance(e, FormValidationError):<EOL><INDENT>return self.form_validation_response(e)<EOL><DEDENT>elif isinstance(e, TypeError):<EOL><INDENT>result = rc.BAD_REQUEST<EOL>hm = HandlerMethod(meth)<EOL>sig = hm.signature<EOL>msg = '<STR_LIT>'<EOL>if sig:<EOL><INDENT>msg += '<STR_LIT>' % sig<EOL><DEDENT>else:<EOL><INDENT>msg += '<STR_LIT>'<EOL><DEDENT>if self.display_errors:<EOL><INDENT>msg += '<STR_LIT>' % str(e)<EOL><DEDENT>result.content = format_error(msg)<EOL>return result<EOL><DEDENT>elif isinstance(e, Http404):<EOL><INDENT>return rc.NOT_FOUND<EOL><DEDENT>elif isinstance(e, HttpStatusCode):<EOL><INDENT>return e.response<EOL><DEDENT>else: <EOL><INDENT>"""<STR_LIT>"""<EOL>exc_type, exc_value, tb = sys.exc_info()<EOL>rep = ExceptionReporter(request, exc_type, exc_value, tb.tb_next)<EOL>if self.email_errors:<EOL><INDENT>self.email_exception(rep)<EOL><DEDENT>if self.display_errors:<EOL><INDENT>return HttpResponseServerError(<EOL>format_error('<STR_LIT:\n>'.join(rep.format_exception())))<EOL><DEDENT>else:<EOL><INDENT>raise<EOL><DEDENT><DEDENT>
|
Override this method to add handling of errors customized for your
needs
|
f7123:c3:m8
|
def address_inline(request, prefix="<STR_LIT>", country_code=None, template_name="<STR_LIT>"):
|
country_prefix = "<STR_LIT>"<EOL>prefix = request.POST.get('<STR_LIT>', prefix)<EOL>if prefix:<EOL><INDENT>country_prefix = prefix + '<STR_LIT>'<EOL><DEDENT>country_code = request.POST.get(country_prefix, country_code)<EOL>form_class = form_factory(country_code=country_code)<EOL>if request.method == "<STR_LIT:POST>":<EOL><INDENT>data = {}<EOL>for (key, val) in request.POST.items():<EOL><INDENT>if val is not None and len(val) > <NUM_LIT:0>:<EOL><INDENT>data[key] = val<EOL><DEDENT><DEDENT>data.update({country_prefix: country_code})<EOL>form = form_class(prefix=prefix, initial=data)<EOL><DEDENT>else:<EOL><INDENT>form = form_class(prefix=prefix)<EOL><DEDENT>return render_to_string(template_name, RequestContext(request, {<EOL>"<STR_LIT>": form,<EOL>"<STR_LIT>": prefix,<EOL>}))<EOL>
|
Displays a postal address form with localized fields
|
f7124:m0
|
def coerce_put_post(request):
|
if request.method == "<STR_LIT>":<EOL><INDENT>if hasattr(request, '<STR_LIT>'):<EOL><INDENT>del request._post<EOL>del request._files<EOL><DEDENT>try:<EOL><INDENT>request.method = "<STR_LIT:POST>"<EOL>request._load_post_and_files()<EOL>request.method = "<STR_LIT>"<EOL><DEDENT>except AttributeError:<EOL><INDENT>request.META['<STR_LIT>'] = '<STR_LIT:POST>'<EOL>request._load_post_and_files()<EOL>request.META['<STR_LIT>'] = '<STR_LIT>'<EOL><DEDENT>request.PUT = request.POST<EOL><DEDENT>
|
Django doesn't particularly understand REST.
In case we send data over PUT, Django won't
actually look at the data and load it. We need
to twist its arm here.
The try/except abomination here is due to a bug
in mod_python. This should fix it.
|
f7143:m1
|
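The trick above is to relabel the request as POST just long enough for Django's parser to populate the form data, then restore the verb. A framework-free stand-in showing the same swap (`FakeRequest` and its toy parser are invented for illustration):

    class FakeRequest(object):
        def __init__(self, method, body):
            self.method, self.body = method, body

        def _load_post_and_files(self):
            # Toy parser that, like Django's, only runs for POST.
            if self.method == 'POST':
                self.POST = dict(kv.split('=') for kv in self.body.split('&'))

    req = FakeRequest('PUT', 'a=1&b=2')
    req.method = 'POST'          # twist its arm
    req._load_post_and_files()
    req.method = 'PUT'           # restore the real verb
    req.PUT = req.POST
    print(req.PUT)  # {'a': '1', 'b': '2'}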
def __getattr__(self, attr):
|
try:<EOL><INDENT>(r, c) = self.CODES.get(attr)<EOL><DEDENT>except TypeError:<EOL><INDENT>raise AttributeError(attr)<EOL><DEDENT>class HttpResponseWrapper(HttpResponse):<EOL><INDENT>"""<STR_LIT>"""<EOL>def _set_content(self, content):<EOL><INDENT>"""<STR_LIT>"""<EOL>if not isinstance(content, (str, bytes)) and hasattr(content, '<STR_LIT>'):<EOL><INDENT>self._container = content<EOL>self._is_string = False<EOL><DEDENT>else:<EOL><INDENT>self._container = [content]<EOL>self._is_string = True<EOL><DEDENT><DEDENT>content = property(HttpResponse._get_content, _set_content)<EOL><DEDENT>return HttpResponseWrapper(r, content_type='<STR_LIT>', status=c)<EOL>
|
Returns a fresh `HttpResponse` when getting
an "attribute". This is backwards compatible
with 0.2, which is important.
|
f7143:c0:m0
|
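The pattern above turns attribute access into a factory, so every `rc.NOT_FOUND` is a fresh response rather than a shared mutable one. A dependency-free sketch of that pattern (dicts stand in for `HttpResponse`, and the codes shown are illustrative):

    class ResponseCodes(object):
        CODES = {
            'ALL_OK': ('OK', 200),
            'BAD_REQUEST': ('Bad Request', 400),
            'NOT_FOUND': ('Not Found', 404),
        }

        def __getattr__(self, attr):
            # Only called for attributes not found by normal lookup.
            try:
                body, status = self.CODES[attr]
            except KeyError:
                raise AttributeError(attr)
            return {'status': status, 'content': [body]}  # fresh each time

    rc = ResponseCodes()
    assert rc.NOT_FOUND is not rc.NOT_FOUND  # a new response per access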
def loader_for_type(self, ctype):
|
for loadee, mimes in Mimer.TYPES.items():<EOL><INDENT>for mime in mimes:<EOL><INDENT>if ctype.startswith(mime):<EOL><INDENT>return loadee<EOL><DEDENT><DEDENT><DEDENT>
|
Gets a function ref to deserialize content
for a certain mimetype.
|
f7143:c4:m2
|
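The lookup matches on mime-type prefixes, so parameters like `; charset=utf-8` don't break it. A self-contained version with only JSON registered (the registry contents here are illustrative):

    import json

    TYPES = {
        json.loads: ('application/json',),
        # e.g. yaml.safe_load: ('application/x-yaml',) if PyYAML is in play
    }

    def loader_for_type(ctype):
        for loadee, mimes in TYPES.items():
            for mime in mimes:
                if ctype.startswith(mime):
                    return loadee
        return None

    print(loader_for_type('application/json; charset=utf-8'))  # json.loads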
def content_type(self):
|
type_formencoded = "<STR_LIT>"<EOL>ctype = self.request.META.get('<STR_LIT>', type_formencoded)<EOL>if type_formencoded in ctype:<EOL><INDENT>return None<EOL><DEDENT>return ctype<EOL>
|
Returns the content type of the request in all cases where it
differs from a submitted form (application/x-www-form-urlencoded).
|
f7143:c4:m3
|
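A runnable restatement of the check above: form-encoded bodies report no special content type, so ordinary form handling applies (`CONTENT_TYPE` is the standard WSGI META key):

    def content_type(meta):
        type_formencoded = 'application/x-www-form-urlencoded'
        ctype = meta.get('CONTENT_TYPE', type_formencoded)
        if type_formencoded in ctype:
            return None
        return ctype

    print(content_type({'CONTENT_TYPE': 'application/json'}))  # application/json
    print(content_type({}))                                    # None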
def translate(self):
|
ctype = self.content_type()<EOL>self.request.content_type = ctype<EOL>if not self.is_multipart() and ctype:<EOL><INDENT>loadee = self.loader_for_type(ctype)<EOL>if loadee:<EOL><INDENT>try:<EOL><INDENT>self.request.data = loadee(self.request.raw_post_data)<EOL>self.request.POST = self.request.PUT = dict()<EOL><DEDENT>except (TypeError, ValueError):<EOL><INDENT>raise MimerDataException<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.request.data = None<EOL><DEDENT><DEDENT>return self.request<EOL>
|
Will look at the `Content-type` sent by the client, and maybe
deserialize the contents according to the format they sent. This
works for JSON, YAML, XML and Pickle. Since the data is not
necessarily key-value (it may be just a list), it is placed on
`request.data` instead, and the handler will have to read it from
there.
It will also set `request.content_type` so the handler has an easy
way to tell what's going on. `request.content_type` will always be
None for form-encoded and/or multipart form data (what your browser sends).
|
f7143:c4:m4
|
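An end-to-end sketch of that flow for a JSON body; the request object, the loader registry, and the `ValueError` standing in for `MimerDataException` are all stand-ins:

    import json

    LOADERS = {'application/json': json.loads}

    class FakeRequest(object):
        def __init__(self, ctype, raw):
            self.META = {'CONTENT_TYPE': ctype}
            self.raw_post_data = raw

    def translate(request):
        ctype = request.META.get('CONTENT_TYPE', '')
        request.content_type = ctype or None
        loadee = next((fn for mime, fn in LOADERS.items()
                       if ctype.startswith(mime)), None)
        if loadee:
            try:
                request.data = loadee(request.raw_post_data)
                request.POST = request.PUT = {}   # force reads via request.data
            except (TypeError, ValueError):
                raise ValueError('unparsable payload')
        else:
            request.data = None
        return request

    req = translate(FakeRequest('application/json', '{"a": 1}'))
    print(req.data)  # {'a': 1}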
def service_factory(prefix, base):
|
name = prefix.title() + base.__name__<EOL>cls = type(name, (base,), {'<STR_LIT:name>': prefix})<EOL>return cls<EOL>
|
Test utility to create subclasses of the above ServiceHandler classes
based on a prefix and base. The prefix is set as the ``name`` attribute
on the resulting type.
e.g. ``service_factory("foo", ServicePoolHandler)`` returns a type
called ``FooServicePoolHandler`` that inherits from ``ServicePoolHandler``,
and ``FooServicePoolHandler.name`` is ``"foo"``.
|
f7150:m4
|
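Since the factory is three lines of dynamic subclassing, it can be demonstrated directly with a stand-in base class:

    class ServicePoolHandler(object):
        name = None

    def service_factory(prefix, base):
        name = prefix.title() + base.__name__
        return type(name, (base,), {'name': prefix})

    FooHandler = service_factory('foo', ServicePoolHandler)
    print(FooHandler.__name__, FooHandler.name)  # FooServicePoolHandler foo
    assert issubclass(FooHandler, ServicePoolHandler)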
@rpc<EOL><INDENT>def proxy(self, method, *args):<DEDENT>
|
getattr(self.rpcproxy, method)(*args)<EOL>
|
Proxies RPC calls to ``method`` on itself, so we can test handling
of errors in remote services.
|
f7164:c1:m3
|
def iter_extensions(extension):
|
for _, ext in inspect.getmembers(extension, is_extension):<EOL><INDENT>for item in iter_extensions(ext):<EOL><INDENT>yield item<EOL><DEDENT>yield ext<EOL><DEDENT>
|
Depth-first iterator over sub-extensions on `extension`.
|
f7192:m4
|
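The generator recurses before yielding, so the deepest sub-extensions come out first and the root itself is never yielded. A runnable sketch with a toy `Ext` class in place of nameko's Extension:

    import inspect

    class Ext(object):
        def __init__(self, name, **subs):
            self.name = name
            for attr, sub in subs.items():
                setattr(self, attr, sub)

    def is_extension(obj):
        return isinstance(obj, Ext)

    def iter_extensions(extension):
        for _, ext in inspect.getmembers(extension, is_extension):
            for item in iter_extensions(ext):
                yield item
            yield ext

    root = Ext('root', child=Ext('child', grandchild=Ext('grandchild')))
    print([e.name for e in iter_extensions(root)])  # ['grandchild', 'child']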
def setup(self):
|
Called on bound Extensions before the container starts.
Extensions should do any required initialisation here.
|
f7192:c0:m1
|
|
def start(self):
|
Called on bound Extensions when the container has successfully
started.
This is only called after all other Extensions have successfully
returned from :meth:`Extension.setup`. If the Extension reacts
to external events, it should now start acting upon them.
|
f7192:c0:m2
|
|
def stop(self):
|
Called when the service container begins to shut down.
Extensions should do any graceful shutdown here.
|
f7192:c0:m3
|
|
def kill(self):
|
Called to stop this extension without grace.
Extensions should urgently shut down here. This means
stopping as soon as possible by omitting cleanup.
This may be distinct from ``stop()`` for certain dependencies.
For example, :class:`~messaging.QueueConsumer` tracks messages being
processed and pending message acks. Its ``kill`` implementation
discards these and disconnects from RabbitMQ as soon as possible.
Extensions should not raise during kill, since the container
is already dying. Instead they should log what is appropriate and
swallow the exception to allow the container kill to continue.
|
f7192:c0:m4
|
|
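Taken together, the four hooks above describe an extension's lifecycle: `setup` before the container starts, `start` once everything is up, `stop` for graceful shutdown, `kill` for urgent shutdown. A hedged sketch of a concrete extension honouring that contract (`HeartbeatExtension` and its connection object are imaginary):

    class HeartbeatExtension(object):
        def setup(self):
            # before container start: initialise, but take no action yet
            self.connection = None

        def start(self):
            # container is up: begin reacting to external events
            self.connection = object()  # stand-in for a real connection

        def stop(self):
            # graceful: let in-flight work finish, then disconnect
            self.connection = None

        def kill(self):
            # urgent: skip cleanup and never raise -- the container is dying
            try:
                self.connection = None
            except Exception:
                pass  # log and swallow, per the kill() docstring above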
def bind(self, container):
|
def clone(prototype):<EOL><INDENT>if prototype.is_bound():<EOL><INDENT>raise RuntimeError('<STR_LIT>')<EOL><DEDENT>cls = type(prototype)<EOL>args, kwargs = prototype.__params<EOL>instance = cls(*args, **kwargs)<EOL>instance.container = weakref.proxy(container)<EOL>return instance<EOL><DEDENT>instance = clone(self)<EOL>for name, ext in inspect.getmembers(self, is_extension):<EOL><INDENT>setattr(instance, name, ext.bind(container))<EOL><DEDENT>return instance<EOL>
|
Get an instance of this Extension to bind to `container`.
|
f7192:c0:m5
|
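The class attribute acts as an unbound prototype: `bind` re-invokes the constructor with the stored params and attaches a weak proxy to the container, so the bound copy doesn't keep the container alive. A self-contained sketch of that prototype pattern (`MiniExtension` and `Container` are invented here):

    import weakref

    class MiniExtension(object):
        def __init__(self, *args, **kwargs):
            self._params = (args, kwargs)   # remember constructor args
            self.container = None

        def is_bound(self):
            return self.container is not None

        def bind(self, container):
            if self.is_bound():
                raise RuntimeError('cannot bind an already-bound extension')
            args, kwargs = self._params
            instance = type(self)(*args, **kwargs)       # fresh clone
            instance.container = weakref.proxy(container)
            return instance

    class Container(object):
        pass

    proto = MiniExtension('amqp://broker')
    container = Container()
    bound = proto.bind(container)
    print(proto.is_bound(), bound.is_bound())  # False True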
def bind(self, container):
|
<EOL>shared = container.shared_extensions.get(self.sharing_key)<EOL>if shared:<EOL><INDENT>return shared<EOL><DEDENT>instance = super(SharedExtension, self).bind(container)<EOL>container.shared_extensions[self.sharing_key] = instance<EOL>return instance<EOL>
|
Bind implementation that supports sharing.
|
f7192:c1:m1
|
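Sharing works by memoising the bound instance on the container, keyed by `sharing_key`: the first bind creates and caches, later binds return the cached copy. A self-contained sketch (names invented for illustration):

    class Container(object):
        def __init__(self):
            self.shared_extensions = {}

    class SharedMini(object):
        sharing_key = 'default'

        def bind(self, container):
            shared = container.shared_extensions.get(self.sharing_key)
            if shared:
                return shared
            instance = type(self)()        # fresh clone, as in bind() above
            instance.container = container
            container.shared_extensions[self.sharing_key] = instance
            return instance

    c = Container()
    a = SharedMini().bind(c)
    b = SharedMini().bind(c)
    assert a is b  # one shared instance per sharing key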
def bind(self, container, attr_name):
|
instance = super(DependencyProvider, self).bind(container)<EOL>instance.attr_name = attr_name<EOL>self.attr_name = attr_name<EOL>return instance<EOL>
|
Get an instance of this Dependency to bind to `container` with
`attr_name`.
|
f7192:c2:m0
|
def get_dependency(self, worker_ctx):
|
Called before worker execution. A DependencyProvider should return
an object to be injected into the worker instance by the container.
|
f7192:c2:m1
|
|
def worker_result(self, worker_ctx, result=None, exc_info=None):
|
Called with the result of a service worker execution.
Dependencies that need to process the result should do it here.
This method is called for all `Dependency` instances on completion
of any worker.
Example: a database session dependency may flush the transaction.
:Parameters:
    worker_ctx : WorkerContext
        See ``nameko.containers.ServiceContainer.spawn_worker``
|
f7192:c2:m2
|
|
def worker_setup(self, worker_ctx):
|
Called before a service worker executes a task.
Dependencies should do any pre-processing here, raising exceptions
in the event of failure.
Example: ...
:Parameters:
    worker_ctx : WorkerContext
        See ``nameko.containers.ServiceContainer.spawn_worker``
|
f7192:c2:m3
|
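Putting the `get_dependency` / `worker_setup` / `worker_result` contract together: a hedged sketch of a session-style provider that commits on success and rolls back on failure, as the `worker_result` docstring suggests (`Session` is a toy, not a real database session):

    class Session(object):
        def __init__(self):
            self.committed = False

        def commit(self):
            self.committed = True

        def rollback(self):
            self.committed = False

    class SessionProvider(object):
        def get_dependency(self, worker_ctx):
            # the returned object is injected into the worker instance
            self.session = Session()
            return self.session

        def worker_setup(self, worker_ctx):
            # pre-processing before the worker runs; raise here on failure
            pass

        def worker_result(self, worker_ctx, result=None, exc_info=None):
            if exc_info is None:
                self.session.commit()    # flush the transaction
            else:
                self.session.rollback()

    provider = SessionProvider()
    session = provider.get_dependency(worker_ctx=None)
    provider.worker_setup(worker_ctx=None)
    provider.worker_result(worker_ctx=None, result='ok')
    print(session.committed)  # True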