Dataset columns:

id                int32 (values 0 to 252k)
repo              string (length 7 to 55)
path              string (length 4 to 127)
func_name         string (length 1 to 88)
original_string   string (length 75 to 19.8k)
language          string (1 distinct value)
code              string (length 75 to 19.8k)
code_tokens       list
docstring         string (length 3 to 17.3k)
docstring_tokens  list
sha               string (length 40)
url               string (length 87 to 242)
240,200
fdb/aufmachen
aufmachen/websites/immoweb.py
parse_number
python
def parse_number(d, key, regex, s):
    """Find a number using a given regular expression.

    If the number is found, sets it under the key in the given dictionary.

    d     - The dictionary that will contain the data.
    key   - The key into the dictionary.
    regex - A string containing the regular expression.
    s     - The string to search.
    """
    result = find_number(regex, s)
    if result is not None:
        d[key] = result
[ "def", "parse_number", "(", "d", ",", "key", ",", "regex", ",", "s", ")", ":", "result", "=", "find_number", "(", "regex", ",", "s", ")", "if", "result", "is", "not", "None", ":", "d", "[", "key", "]", "=", "result" ]
Find a number using a given regular expression. If the number is found, sets it under the key in the given dictionary. d - The dictionary that will contain the data. key - The key into the dictionary. regex - A string containing the regular expression. s - The string to search.
[ "Find", "a", "number", "using", "a", "given", "regular", "expression", ".", "If", "the", "number", "is", "found", "sets", "it", "under", "the", "key", "in", "the", "given", "dictionary", ".", "d", "-", "The", "dictionary", "that", "will", "contain", "the", "data", ".", "key", "-", "The", "key", "into", "the", "dictionary", ".", "regex", "-", "A", "string", "containing", "the", "regular", "expression", ".", "s", "-", "The", "string", "to", "search", "." ]
f2986a0cf087ac53969f82b84d872e3f1c6986f4
https://github.com/fdb/aufmachen/blob/f2986a0cf087ac53969f82b84d872e3f1c6986f4/aufmachen/websites/immoweb.py#L171-L182
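The record above only writes into the dictionary when the regex actually matches, which is what lets callers chain many optional fields onto one dict. A minimal runnable sketch of that pattern, with a hypothetical find_number standing in for the repo's helper (its real return type isn't shown in this record):

import re

def find_number(regex, s):
    # Hypothetical stand-in: first capture group as an int, or None.
    match = re.search(regex, s)
    return int(match.group(1)) if match else None

def parse_number(d, key, regex, s):
    result = find_number(regex, s)
    if result is not None:
        d[key] = result

listing = {}
parse_number(listing, 'bedrooms', r'(\d+) bedroom', '3 bedroom house, 2 baths')
parse_number(listing, 'garages', r'(\d+) garage', '3 bedroom house, 2 baths')
print(listing)  # {'bedrooms': 3} -- 'garages' stays absent when nothing matched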
240,201
casebeer/factual
factual/common/category_helpers.py
make_category_filter
python
def make_category_filter(categories, blank=True):
    '''
    Generates a dict representing a Factual filter matching any of the
    categories passed.

    The resulting filter uses $bw "begins with" operators to return all
    matching subcategories. Because of this, passing a top level category
    removes the need to pass any of its subcategories. Conversely,
    specifying subcategories will not restrict results as expected if a
    prefix of those subcategories is also provided.

    For example:

        make_category_filter(["Food & Beverage", "Food & Beverage > Cheese"])

    is the same as

        make_category_filter(["Food & Beverage"])

    To minimize the size of the filters sent to Factual,
    make_category_filter identifies redundant subcategories and removes
    them.

    Note that because of this prefix matching, queries may return rows
    from unwanted subcategories. It may be necessary for you to filter
    out these records after the Factual request.

    Specify blank=True to include items without a category set.
    '''
    categories = [category.strip() for category in categories]

    # find shortest prefixes
    categories.sort()
    redundant_categories = set()
    prefix_candidate = None
    for category in categories:
        if prefix_candidate is not None \
                and category.find(prefix_candidate) == 0:
            # prefix_candidate is a prefix of the current category,
            # so we can skip the current category
            redundant_categories.add(category)
        else:
            prefix_candidate = category
    categories = [category for category in categories
                  if category not in redundant_categories]

    filters = [ops.bw_("category", category) for category in categories]
    if blank:
        filters.append(ops.blank_("category"))

    return ops.or_(*filters)
[ "def", "make_category_filter", "(", "categories", ",", "blank", "=", "True", ")", ":", "categories", "=", "[", "category", ".", "strip", "(", ")", "for", "category", "in", "categories", "]", "# find shortest prefixes", "categories", ".", "sort", "(", ")", "redundant_categories", "=", "set", "(", ")", "prefix_candidate", "=", "None", "for", "category", "in", "categories", ":", "if", "prefix_candidate", "!=", "None", "and", "category", ".", "find", "(", "prefix_candidate", ")", "==", "0", ":", "# prefix_candidate is a prefix of the current category, ", "# so we can skip the current category", "redundant_categories", ".", "add", "(", "category", ")", "else", ":", "prefix_candidate", "=", "category", "categories", "=", "[", "category", "for", "category", "in", "categories", "if", "category", "not", "in", "redundant_categories", "]", "filters", "=", "[", "ops", ".", "bw_", "(", "\"category\"", ",", "category", ")", "for", "category", "in", "categories", "]", "if", "blank", ":", "filters", ".", "append", "(", "ops", ".", "blank_", "(", "\"category\"", ")", ")", "return", "ops", ".", "or_", "(", "*", "filters", ")" ]
Generates a dict representing a Factual filter matching any of the categories passed. The resulting filter uses $bw "begins with" operators to return all matching subcategories. Because of this, passing a top level category removes the need to pass any of its subcategories. Conversely, specifying subcategories will not restrict results as expected if a prefix of those subcategories is also provided. For example: make_category_filter(["Food & Beverage", "Food & Beverage > Cheese"]) is the same as make_category_filter(["Food & Beverage"]) To minimize the size of the filters sent to Factual, make_category_filter identifies redundant subcategories and removes them. Note that because of this prefix matching, queries may return rows from unwanted subcategories. It may be necessary for you to filter out these records after the Factual request. Specify blank=True to include items without a category set.
[ "Generates", "a", "dict", "representing", "a", "Factual", "filter", "matching", "any", "of", "the", "categories", "passed", "." ]
f2795a8c9fd447c5d62887ae0f960481ce13be84
https://github.com/casebeer/factual/blob/f2795a8c9fd447c5d62887ae0f960481ce13be84/factual/common/category_helpers.py#L11-L54
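The core of the record above is the prefix pruning over a sorted list: once sorted, any category that begins with the most recently kept category is redundant. A standalone sketch of just that step, with the Factual-specific ops filter builders left out:

def prune_redundant(categories):
    categories = sorted(c.strip() for c in categories)
    kept, prefix = [], None
    for category in categories:
        # Sorting puts a prefix immediately before the strings it covers.
        if prefix is not None and category.startswith(prefix):
            continue
        kept.append(category)
        prefix = category
    return kept

print(prune_redundant([
    'Food & Beverage > Cheese',
    'Food & Beverage',
    'Retail',
]))
# ['Food & Beverage', 'Retail'] -- the Cheese subcategory is pruned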
240,202
ANCIR/granoloader
granoloader/mapping.py
ObjectMapper.convert_type
python
def convert_type(self, value, spec):
    """ Some well-educated format guessing. """
    data_type = spec.get('type', 'string').lower().strip()
    if data_type in ['bool', 'boolean']:
        return value.lower() in BOOL_TRUISH
    elif data_type in ['int', 'integer']:
        try:
            return int(value)
        except (ValueError, TypeError):
            return None
    elif data_type in ['float', 'decimal', 'real']:
        try:
            return float(value)
        except (ValueError, TypeError):
            return None
    elif data_type in ['date', 'datetime', 'timestamp']:
        if 'format' in spec:
            format_list = self._get_date_format_list(spec.get('format'))
            if format_list is None:
                raise MappingException(
                    '%s format mapping is not valid: %r'
                    % (spec.get('column'), spec.get('format')))
            for format, precision in format_list:
                try:
                    return {'value': datetime.strptime(value, format),
                            'value_precision': precision}
                except (ValueError, TypeError):
                    pass
            return None
        else:
            try:
                return parser.parse(value)
            except (ValueError, TypeError):
                return None
    elif data_type == 'file':
        return self._get_file(value)
    return value
[ "def", "convert_type", "(", "self", ",", "value", ",", "spec", ")", ":", "data_type", "=", "spec", ".", "get", "(", "'type'", ",", "'string'", ")", ".", "lower", "(", ")", ".", "strip", "(", ")", "if", "data_type", "in", "[", "'bool'", ",", "'boolean'", "]", ":", "return", "value", ".", "lower", "(", ")", "in", "BOOL_TRUISH", "elif", "data_type", "in", "[", "'int'", ",", "'integer'", "]", ":", "try", ":", "return", "int", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "None", "elif", "data_type", "in", "[", "'float'", ",", "'decimal'", ",", "'real'", "]", ":", "try", ":", "return", "float", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "None", "elif", "data_type", "in", "[", "'date'", ",", "'datetime'", ",", "'timestamp'", "]", ":", "if", "'format'", "in", "spec", ":", "format_list", "=", "self", ".", "_get_date_format_list", "(", "spec", ".", "get", "(", "'format'", ")", ")", "if", "format_list", "is", "None", ":", "raise", "MappingException", "(", "'%s format mapping is not valid: %r'", "%", "(", "spec", ".", "get", "(", "'column'", ")", ",", "spec", ".", "get", "(", "'format'", ")", ")", ")", "for", "format", ",", "precision", "in", "format_list", ":", "try", ":", "return", "{", "'value'", ":", "datetime", ".", "strptime", "(", "value", ",", "format", ")", ",", "'value_precision'", ":", "precision", "}", "except", "(", "ValueError", ",", "TypeError", ")", ":", "pass", "return", "None", "else", ":", "try", ":", "return", "parser", ".", "parse", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "None", "elif", "data_type", "==", "'file'", ":", "try", ":", "return", "self", ".", "_get_file", "(", "value", ")", "except", ":", "raise", "return", "value" ]
Some well-educated format guessing.
[ "Some", "well", "-", "educated", "format", "guessing", "." ]
c48b1bd50403dd611340c5f51637f7c5ca54059c
https://github.com/ANCIR/granoloader/blob/c48b1bd50403dd611340c5f51637f7c5ca54059c/granoloader/mapping.py#L31-L71
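The converter above is spec-driven dispatch: the 'type' key picks a coercion, and bad input collapses to None instead of raising. A reduced standalone version with the same shape; BOOL_TRUISH is an assumed value here, since the record doesn't show granoloader's actual set:

from datetime import datetime

BOOL_TRUISH = ('true', 't', 'yes', 'y', '1')  # assumed, not from the record

def convert(value, spec):
    data_type = spec.get('type', 'string').lower().strip()
    if data_type in ('bool', 'boolean'):
        return value.lower() in BOOL_TRUISH
    if data_type in ('int', 'integer'):
        try:
            return int(value)
        except (ValueError, TypeError):
            return None
    if data_type in ('date', 'datetime', 'timestamp'):
        try:
            return datetime.strptime(value, spec.get('format', '%Y-%m-%d'))
        except (ValueError, TypeError):
            return None
    return value

print(convert('Yes', {'type': 'bool'}))         # True
print(convert('n/a', {'type': 'int'}))          # None -- bad input maps to None
print(convert('2014-05-01', {'type': 'date'}))  # datetime.datetime(2014, 5, 1, 0, 0)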
240,203
ANCIR/granoloader
granoloader/mapping.py
ObjectMapper.get_value
python
def get_value(self, spec, row):
    """ Returns the value or a dict with a 'value' entry plus extra
    fields. """
    column = spec.get('column')
    default = spec.get('default')
    if column is None:
        if default is not None:
            return self.convert_type(default, spec)
        return
    value = row.get(column)
    if is_empty(value):
        if default is not None:
            return self.convert_type(default, spec)
        return None
    return self.convert_type(value, spec)
[ "def", "get_value", "(", "self", ",", "spec", ",", "row", ")", ":", "column", "=", "spec", ".", "get", "(", "'column'", ")", "default", "=", "spec", ".", "get", "(", "'default'", ")", "if", "column", "is", "None", ":", "if", "default", "is", "not", "None", ":", "return", "self", ".", "convert_type", "(", "default", ",", "spec", ")", "return", "value", "=", "row", ".", "get", "(", "column", ")", "if", "is_empty", "(", "value", ")", ":", "if", "default", "is", "not", "None", ":", "return", "self", ".", "convert_type", "(", "default", ",", "spec", ")", "return", "None", "return", "self", ".", "convert_type", "(", "value", ",", "spec", ")" ]
Returns the value or a dict with a 'value' entry plus extra fields.
[ "Returns", "the", "value", "or", "a", "dict", "with", "a", "value", "entry", "plus", "extra", "fields", "." ]
c48b1bd50403dd611340c5f51637f7c5ca54059c
https://github.com/ANCIR/granoloader/blob/c48b1bd50403dd611340c5f51637f7c5ca54059c/granoloader/mapping.py#L109-L122
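The lookup order in get_value is: no column configured -> default; empty cell -> default; otherwise the cell value, all routed through convert_type. A simplified sketch of just the resolution order, with is_empty and the conversion step inlined as assumptions:

def resolve(spec, row):
    column = spec.get('column')
    default = spec.get('default')
    if column is None:
        return default
    value = row.get(column)
    if value in (None, ''):   # stand-in for the module's is_empty()
        return default        # may itself be None
    return value

row = {'name': 'ACME', 'country': ''}
print(resolve({'column': 'name'}, row))                      # 'ACME'
print(resolve({'column': 'country', 'default': 'ZA'}, row))  # 'ZA' -- empty cell falls back
print(resolve({'default': 'entity'}, row))                   # 'entity' -- no column configured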
240,204
ANCIR/granoloader
granoloader/mapping.py
ObjectMapper.get_source
python
def get_source(self, spec, row):
    """ Sources can be specified as plain strings or as a reference
    to a column. """
    value = self.get_value({'column': spec.get('source_url_column')}, row)
    if value is not None:
        return value
    return spec.get('source_url')
[ "def", "get_source", "(", "self", ",", "spec", ",", "row", ")", ":", "value", "=", "self", ".", "get_value", "(", "{", "'column'", ":", "spec", ".", "get", "(", "'source_url_column'", ")", "}", ",", "row", ")", "if", "value", "is", "not", "None", ":", "return", "value", "return", "spec", ".", "get", "(", "'source_url'", ")" ]
Sources can be specified as plain strings or as a reference to a column.
[ "Sources", "can", "be", "specified", "as", "plain", "strings", "or", "as", "a", "reference", "to", "a", "column", "." ]
c48b1bd50403dd611340c5f51637f7c5ca54059c
https://github.com/ANCIR/granoloader/blob/c48b1bd50403dd611340c5f51637f7c5ca54059c/granoloader/mapping.py#L124-L129
240,205
ANCIR/granoloader
granoloader/mapping.py
MappingLoader.load
python
def load(self, data):
    """ Load a single row of data and convert it into entities and
    relations. """
    objs = {}
    for mapper in self.entities:
        objs[mapper.name] = mapper.load(self.loader, data)
    for mapper in self.relations:
        objs[mapper.name] = mapper.load(self.loader, data, objs)
[ "def", "load", "(", "self", ",", "data", ")", ":", "objs", "=", "{", "}", "for", "mapper", "in", "self", ".", "entities", ":", "objs", "[", "mapper", ".", "name", "]", "=", "mapper", ".", "load", "(", "self", ".", "loader", ",", "data", ")", "for", "mapper", "in", "self", ".", "relations", ":", "objs", "[", "mapper", ".", "name", "]", "=", "mapper", ".", "load", "(", "self", ".", "loader", ",", "data", ",", "objs", ")" ]
Load a single row of data and convert it into entities and relations.
[ "Load", "a", "single", "row", "of", "data", "and", "convert", "it", "into", "entities", "and", "relations", "." ]
c48b1bd50403dd611340c5f51637f7c5ca54059c
https://github.com/ANCIR/granoloader/blob/c48b1bd50403dd611340c5f51637f7c5ca54059c/granoloader/mapping.py#L210-L218
240,206
olsoneric/pedemath
pedemath/vec3.py
add_v3
python
def add_v3(vec1, m):
    """Return a new Vec3 containing the sum of our x, y, z, and arg.

    If the argument is a number, add it to each of our x, y, and z.
    Otherwise, treat it as a Vec3 and add arg.x, arg.y, and arg.z to
    our own x, y, and z.
    """
    if type(m) in NUMERIC_TYPES:
        return Vec3(vec1.x + m, vec1.y + m, vec1.z + m)
    else:
        return Vec3(vec1.x + m.x, vec1.y + m.y, vec1.z + m.z)
[ "def", "add_v3", "(", "vec1", ",", "m", ")", ":", "if", "type", "(", "m", ")", "in", "NUMERIC_TYPES", ":", "return", "Vec3", "(", "vec1", ".", "x", "+", "m", ",", "vec1", ".", "y", "+", "m", ",", "vec1", ".", "z", "+", "m", ")", "else", ":", "return", "Vec3", "(", "vec1", ".", "x", "+", "m", ".", "x", ",", "vec1", ".", "y", "+", "m", ".", "y", ",", "vec1", ".", "z", "+", "m", ".", "z", ")" ]
Return a new Vec3 containing the sum of our x, y, z, and arg. If the argument is a number, add it to each of our x, y, and z. Otherwise, treat it as a Vec3 and add arg.x, arg.y, and arg.z to our own x, y, and z.
[ "Return", "a", "new", "Vec3", "containing", "the", "sum", "of", "our", "x", "y", "z", "and", "arg", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L22-L32
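add_v3 branches on the argument type: a bare number is broadcast to all three components, anything else is treated as another vector. A runnable sketch with a minimal Vec3 stand-in; NUMERIC_TYPES is assumed to be the plain number types, since the record doesn't show its real value:

class Vec3(object):
    # Minimal stand-in, just enough for the demo.
    def __init__(self, x, y, z):
        self.x, self.y, self.z = x, y, z

    def __repr__(self):
        return 'Vec3(%r, %r, %r)' % (self.x, self.y, self.z)

NUMERIC_TYPES = (int, float)  # assumed

def add_v3(vec1, m):
    if type(m) in NUMERIC_TYPES:
        return Vec3(vec1.x + m, vec1.y + m, vec1.z + m)
    return Vec3(vec1.x + m.x, vec1.y + m.y, vec1.z + m.z)

print(add_v3(Vec3(1, 2, 3), 10))             # Vec3(11, 12, 13) -- scalar broadcast
print(add_v3(Vec3(1, 2, 3), Vec3(4, 5, 6)))  # Vec3(5, 7, 9) -- component-wise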
240,207
olsoneric/pedemath
pedemath/vec3.py
translate_v3
python
def translate_v3(vec, amount):
    """Return a new Vec3 that is a translated version of vec."""
    return Vec3(vec.x + amount, vec.y + amount, vec.z + amount)
[ "def", "translate_v3", "(", "vec", ",", "amount", ")", ":", "return", "Vec3", "(", "vec", ".", "x", "+", "amount", ",", "vec", ".", "y", "+", "amount", ",", "vec", ".", "z", "+", "amount", ")" ]
Return a new Vec3 that is a translated version of vec.
[ "Return", "a", "new", "Vec3", "that", "is", "translated", "version", "of", "vec", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L55-L58
240,208
olsoneric/pedemath
pedemath/vec3.py
scale_v3
python
def scale_v3(vec, amount):
    """Return a new Vec3 that is a scaled version of vec."""
    return Vec3(vec.x * amount, vec.y * amount, vec.z * amount)
[ "def", "scale_v3", "(", "vec", ",", "amount", ")", ":", "return", "Vec3", "(", "vec", ".", "x", "*", "amount", ",", "vec", ".", "y", "*", "amount", ",", "vec", ".", "z", "*", "amount", ")" ]
Return a new Vec3 that is a scaled version of vec.
[ "Return", "a", "new", "Vec3", "that", "is", "a", "scaled", "version", "of", "vec", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L61-L64
240,209
olsoneric/pedemath
pedemath/vec3.py
dot_v3
python
def dot_v3(v, w):
    """Return the dotproduct of two vectors."""
    return sum([x * y for x, y in zip(v, w)])
[ "def", "dot_v3", "(", "v", ",", "w", ")", ":", "return", "sum", "(", "[", "x", "*", "y", "for", "x", ",", "y", "in", "zip", "(", "v", ",", "w", ")", "]", ")" ]
Return the dotproduct of two vectors.
[ "Return", "the", "dotproduct", "of", "two", "vectors", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L73-L76
240,210
olsoneric/pedemath
pedemath/vec3.py
projection_as_vec_v3
python
def projection_as_vec_v3(v, w):
    """Return the projection of vector v on vector w as a vector.

    Unlike projection_v3(), which returns the signed length of the
    projection, this returns the full vector.
    """
    proj_len = projection_v3(v, w)
    return scale_v3(v, proj_len)
[ "def", "projection_as_vec_v3", "(", "v", ",", "w", ")", ":", "proj_len", "=", "projection_v3", "(", "v", ",", "w", ")", "return", "scale_v3", "(", "v", ",", "proj_len", ")" ]
Return the projection of vector v on vector w as a vector, i.e. the full vector result rather than the signed length returned by projection_v3().
[ "Return", "the", "signed", "length", "of", "the", "projection", "of", "vector", "v", "on", "vector", "w", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L96-L102
240,211
olsoneric/pedemath
pedemath/vec3.py
point_to_line
python
def point_to_line(point, segment_start, segment_end):
    """Given a point and a line segment, return the vector from the
    point to the closest point on the segment.
    """
    # TODO: Needs unittests.
    segment_vec = segment_end - segment_start
    # t is the distance along the line; note it is not clamped to
    # [0, 1], so this really measures distance to the infinite line.
    t = -(segment_start - point).dot(segment_vec) / (
        segment_vec.length_squared())
    closest_point = segment_start + scale_v3(segment_vec, t)
    return point - closest_point
[ "def", "point_to_line", "(", "point", ",", "segment_start", ",", "segment_end", ")", ":", "# TODO: Needs unittests.", "segment_vec", "=", "segment_end", "-", "segment_start", "# t is distance along line", "t", "=", "-", "(", "segment_start", "-", "point", ")", ".", "dot", "(", "segment_vec", ")", "/", "(", "segment_vec", ".", "length_squared", "(", ")", ")", "closest_point", "=", "segment_start", "+", "scale_v3", "(", "segment_vec", ",", "t", ")", "return", "point", "-", "closest_point" ]
Given a point and a line segment, return the vector from the point to the closest point on the segment.
[ "Given", "a", "point", "and", "a", "line", "segment", "return", "the", "vector", "from", "the", "point", "to", "the", "closest", "point", "on", "the", "segment", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L105-L117
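The t above comes from minimizing |segment_start + t*d - point|^2 over t, which gives t = (point - segment_start)·d / |d|^2; the code's -(segment_start - point)·d is the same numerator. A numeric check over plain tuples, with the vector helpers inlined:

def point_to_line_tuples(point, seg_start, seg_end):
    d = tuple(e - s for s, e in zip(seg_start, seg_end))
    sp = tuple(p - s for s, p in zip(seg_start, point))
    t = sum(a * b for a, b in zip(sp, d)) / sum(a * a for a in d)
    closest = tuple(s + t * a for s, a in zip(seg_start, d))
    return tuple(p - c for p, c in zip(point, closest))

# Point above the middle of a unit segment on the x axis:
print(point_to_line_tuples((0.5, 1.0, 0.0), (0.0, 0.0, 0.0), (1.0, 0.0, 0.0)))
# (0.0, 1.0, 0.0) -- the perpendicular offset from the segment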
240,212
olsoneric/pedemath
pedemath/vec3.py
cross_v3
python
def cross_v3(vec_a, vec_b):
    """Return the crossproduct between vec_a and vec_b."""
    return Vec3(vec_a.y * vec_b.z - vec_a.z * vec_b.y,
                vec_a.z * vec_b.x - vec_a.x * vec_b.z,
                vec_a.x * vec_b.y - vec_a.y * vec_b.x)
[ "def", "cross_v3", "(", "vec_a", ",", "vec_b", ")", ":", "return", "Vec3", "(", "vec_a", ".", "y", "*", "vec_b", ".", "z", "-", "vec_a", ".", "z", "*", "vec_b", ".", "y", ",", "vec_a", ".", "z", "*", "vec_b", ".", "x", "-", "vec_a", ".", "x", "*", "vec_b", ".", "z", ",", "vec_a", ".", "x", "*", "vec_b", ".", "y", "-", "vec_a", ".", "y", "*", "vec_b", ".", "x", ")" ]
Return the crossproduct between vec_a and vec_b.
[ "Return", "the", "crossproduct", "between", "vec_a", "and", "vec_b", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L120-L125
240,213
olsoneric/pedemath
pedemath/vec3.py
rotate_around_vector_v3
python
def rotate_around_vector_v3(v, angle_rad, norm_vec):
    """Rotate v around norm_vec by angle_rad."""
    cos_val = math.cos(angle_rad)
    sin_val = math.sin(angle_rad)

    # (v * cos_val) +
    # ((norm_vec . v) * (1.0 - cos_val)) * norm_vec +
    # (v x norm_vec) * sin_val
    return add_v3(
        add_v3(scale_v3(v, cos_val),
               scale_v3(norm_vec, dot_v3(norm_vec, v) * (1.0 - cos_val))),
        scale_v3(cross_v3(v, norm_vec), sin_val))
[ "def", "rotate_around_vector_v3", "(", "v", ",", "angle_rad", ",", "norm_vec", ")", ":", "cos_val", "=", "math", ".", "cos", "(", "angle_rad", ")", "sin_val", "=", "math", ".", "sin", "(", "angle_rad", ")", "## (v * cosVal) +", "## ((normVec * v) * (1.0 - cosVal)) * normVec +", "## (v ^ normVec) * sinVal)", "#line1: scaleV3(v,cosVal)", "#line2: dotV3( scaleV3( dotV3(normVec,v), 1.0-cosVal), normVec)", "#line3: scaleV3( crossV3( v,normVec), sinVal)", "#a = scaleV3(v,cosVal)", "#b = scaleV3( normVec, dotV3(normVec,v) * (1.0-cosVal))", "#c = scaleV3( crossV3( v,normVec), sinVal)", "return", "add_v3", "(", "add_v3", "(", "scale_v3", "(", "v", ",", "cos_val", ")", ",", "scale_v3", "(", "norm_vec", ",", "dot_v3", "(", "norm_vec", ",", "v", ")", "*", "(", "1.0", "-", "cos_val", ")", ")", ")", ",", "scale_v3", "(", "cross_v3", "(", "v", ",", "norm_vec", ")", ",", "sin_val", ")", ")" ]
rotate v around norm_vec by angle_rad.
[ "rotate", "v", "around", "norm_vec", "by", "angle_rad", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L136-L153
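The commented formula above is Rodrigues' rotation, except that the cross term is v x norm_vec rather than norm_vec x v, so positive angles rotate in the opposite sense of the usual right-hand rule. A quick numeric check over tuples:

import math

def rotate_tuple(v, angle_rad, n):
    # v*cos + n*(n.v)*(1 - cos) + (v x n)*sin, same as the record
    c, s = math.cos(angle_rad), math.sin(angle_rad)
    dot = sum(a * b for a, b in zip(n, v))
    cross = (v[1] * n[2] - v[2] * n[1],
             v[2] * n[0] - v[0] * n[2],
             v[0] * n[1] - v[1] * n[0])
    return tuple(a * c + b * dot * (1.0 - c) + d * s
                 for a, b, d in zip(v, n, cross))

# Quarter turn of the x unit vector around the z axis:
print(rotate_tuple((1.0, 0.0, 0.0), math.pi / 2, (0.0, 0.0, 1.0)))
# approximately (0.0, -1.0, 0.0) -- the v x n ordering flips the usual sense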
240,214
olsoneric/pedemath
pedemath/vec3.py
ave_list_v3
python
def ave_list_v3(vec_list):
    """Return the average vector of a list of vectors."""
    vec = Vec3(0, 0, 0)
    for v in vec_list:
        vec += v
    num_vecs = float(len(vec_list))
    vec = Vec3(vec.x / num_vecs, vec.y / num_vecs, vec.z / num_vecs)
    return vec
[ "def", "ave_list_v3", "(", "vec_list", ")", ":", "vec", "=", "Vec3", "(", "0", ",", "0", ",", "0", ")", "for", "v", "in", "vec_list", ":", "vec", "+=", "v", "num_vecs", "=", "float", "(", "len", "(", "vec_list", ")", ")", "vec", "=", "Vec3", "(", "vec", ".", "x", "/", "num_vecs", ",", "vec", ".", "y", "/", "num_vecs", ",", "vec", ".", "z", "/", "num_vecs", ")", "return", "vec" ]
Return the average vector of a list of vectors.
[ "Return", "the", "average", "vector", "of", "a", "list", "of", "vectors", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L156-L164
240,215
olsoneric/pedemath
pedemath/vec3.py
_float_almost_equal
python
def _float_almost_equal(float1, float2, places=7):
    """Return True if two numbers are equal up to the specified
    number of "places" after the decimal point.
    """
    if round(abs(float2 - float1), places) == 0:
        return True
    return False
[ "def", "_float_almost_equal", "(", "float1", ",", "float2", ",", "places", "=", "7", ")", ":", "if", "round", "(", "abs", "(", "float2", "-", "float1", ")", ",", "places", ")", "==", "0", ":", "return", "True", "return", "False" ]
Return True if two numbers are equal up to the specified number of "places" after the decimal point.
[ "Return", "True", "if", "two", "numbers", "are", "equal", "up", "to", "the", "specified", "number", "of", "places", "after", "the", "decimal", "point", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L428-L436
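Rounding the absolute difference is the same trick unittest's assertAlmostEqual uses: two values compare equal when their difference rounds to zero at `places` decimals, i.e. when it is below 0.5 * 10**-places. A few probes against the function above:

print(_float_almost_equal(1.0, 1.0 + 4e-8))       # True  -- rounds to 0 at 7 places
print(_float_almost_equal(1.0, 1.0 + 6e-8))       # False -- rounds to 1e-07
print(_float_almost_equal(1.0, 1.001, places=2))  # True  -- coarser comparison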
240,216
olsoneric/pedemath
pedemath/vec3.py
Vec3.almost_equal
python
def almost_equal(self, v2, places=7):
    """When comparing for equality, compare floats up to a limited
    precision specified by "places".
    """
    try:
        return (
            len(self) == len(v2) and
            _float_almost_equal(self.x, v2.x, places) and
            _float_almost_equal(self.y, v2.y, places) and
            _float_almost_equal(self.z, v2.z, places))
    except (AttributeError, TypeError):
        # v2 is not vector-like
        return False
[ "def", "almost_equal", "(", "self", ",", "v2", ",", "places", "=", "7", ")", ":", "try", ":", "return", "(", "len", "(", "self", ")", "==", "len", "(", "v2", ")", "and", "_float_almost_equal", "(", "self", ".", "x", ",", "v2", ".", "x", ",", "places", ")", "and", "_float_almost_equal", "(", "self", ".", "y", ",", "v2", ".", "y", ",", "places", ")", "and", "_float_almost_equal", "(", "self", ".", "z", ",", "v2", ".", "z", ",", "places", ")", ")", "except", ":", "return", "False" ]
When comparing for equality, compare floats up to a limited precision specified by "places".
[ "When", "comparing", "for", "equality", "compare", "floats", "up", "to", "a", "limited", "precision", "specified", "by", "places", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L291-L303
240,217
olsoneric/pedemath
pedemath/vec3.py
Vec3.dot
python
def dot(self, w):
    """Return the dotproduct between self and another vector."""
    return sum([x * y for x, y in zip(self, w)])
[ "def", "dot", "(", "self", ",", "w", ")", ":", "return", "sum", "(", "[", "x", "*", "y", "for", "x", ",", "y", "in", "zip", "(", "self", ",", "w", ")", "]", ")" ]
Return the dotproduct between self and another vector.
[ "Return", "the", "dotproduct", "between", "self", "and", "another", "vector", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L321-L324
240,218
olsoneric/pedemath
pedemath/vec3.py
Vec3.cross
python
def cross(self, vec):
    """Return the crossproduct between self and vec."""
    return Vec3(self.y * vec.z - self.z * vec.y,
                self.z * vec.x - self.x * vec.z,
                self.x * vec.y - self.y * vec.x)
[ "def", "cross", "(", "self", ",", "vec", ")", ":", "return", "Vec3", "(", "self", ".", "y", "*", "vec", ".", "z", "-", "self", ".", "z", "*", "vec", ".", "y", ",", "self", ".", "z", "*", "vec", ".", "x", "-", "self", ".", "x", "*", "vec", ".", "z", ",", "self", ".", "x", "*", "vec", ".", "y", "-", "self", ".", "y", "*", "vec", ".", "x", ")" ]
Return the crossproduct between self and vec.
[ "Return", "the", "crossproduct", "between", "self", "and", "vec", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L326-L331
240,219
olsoneric/pedemath
pedemath/vec3.py
Vec3.set
python
def set(self, x, y, z):
    """Set x, y, and z components.

    Also return self.
    """
    self.x = x
    self.y = y
    self.z = z
    return self
[ "def", "set", "(", "self", ",", "x", ",", "y", ",", "z", ")", ":", "self", ".", "x", "=", "x", "self", ".", "y", "=", "y", "self", ".", "z", "=", "z", "return", "self" ]
Set x, y, and z components. Also return self.
[ "Set", "x", "y", "and", "z", "components", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L333-L342
240,220
olsoneric/pedemath
pedemath/vec3.py
Vec3.neg
python
def neg(self):
    """Negative value of all components."""
    self.x = -self.x
    self.y = -self.y
    self.z = -self.z
[ "def", "neg", "(", "self", ")", ":", "self", ".", "x", "=", "-", "self", ".", "x", "self", ".", "y", "=", "-", "self", ".", "y", "self", ".", "z", "=", "-", "self", ".", "z" ]
Negative value of all components.
[ "Negative", "value", "of", "all", "components", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/vec3.py#L344-L348
240,221
CivicSpleen/ckcache
ckcache/filesystem.py
FsCache.put_stream
python
def put_stream(self, rel_path, metadata=None, cb=None):
    """Return a file object to write into the cache. The caller is
    responsible for closing the stream.
    """
    from io import IOBase

    if not isinstance(rel_path, basestring):
        rel_path = rel_path.cache_key

    repo_path = os.path.join(self.cache_dir, rel_path.strip("/"))

    if not os.path.isdir(os.path.dirname(repo_path)):
        os.makedirs(os.path.dirname(repo_path))

    if os.path.exists(repo_path):
        os.remove(repo_path)

    sink = open(repo_path, 'wb')
    upstream = self.upstream

    class flo(IOBase):
        '''This File-Like-Object class ensures that the file is also
        sent to the upstream after it is stored in the FSCache.'''

        def __init__(self, sink, upstream, repo_path, rel_path):
            self._sink = sink
            self._upstream = upstream
            self._repo_path = repo_path
            self._rel_path = rel_path

        @property
        def rel_path(self):
            return self._rel_path

        def write(self, str_):
            self._sink.write(str_)

        def close(self):
            if not self._sink.closed:
                self._sink.close()
                if self._upstream and not self._upstream.readonly \
                        and not self._upstream.usreadonly:
                    self._upstream.put(
                        self._repo_path, self._rel_path, metadata=metadata)

        def __enter__(self):
            # Can be used as a context manager.
            return self

        def __exit__(self, type_, value, traceback):
            if type_:
                return False
            self.close()

    self.put_metadata(rel_path, metadata)

    return flo(sink, upstream, repo_path, rel_path)
[ "def", "put_stream", "(", "self", ",", "rel_path", ",", "metadata", "=", "None", ",", "cb", "=", "None", ")", ":", "from", "io", "import", "IOBase", "if", "not", "isinstance", "(", "rel_path", ",", "basestring", ")", ":", "rel_path", "=", "rel_path", ".", "cache_key", "repo_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cache_dir", ",", "rel_path", ".", "strip", "(", "\"/\"", ")", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "repo_path", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "repo_path", ")", ")", "if", "os", ".", "path", ".", "exists", "(", "repo_path", ")", ":", "os", ".", "remove", "(", "repo_path", ")", "sink", "=", "open", "(", "repo_path", ",", "'wb'", ")", "upstream", "=", "self", ".", "upstream", "class", "flo", "(", "IOBase", ")", ":", "'''This File-Like-Object class ensures that the file is also\n sent to the upstream after it is stored in the FSCache. '''", "def", "__init__", "(", "self", ",", "sink", ",", "upstream", ",", "repo_path", ",", "rel_path", ")", ":", "self", ".", "_sink", "=", "sink", "self", ".", "_upstream", "=", "upstream", "self", ".", "_repo_path", "=", "repo_path", "self", ".", "_rel_path", "=", "rel_path", "@", "property", "def", "rel_path", "(", "self", ")", ":", "return", "self", ".", "_rel_path", "def", "write", "(", "self", ",", "str_", ")", ":", "self", ".", "_sink", ".", "write", "(", "str_", ")", "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_sink", ".", "closed", ":", "# print \"Closing put_stream.flo {}", "# is_closed={}!\".format(self._repo_path, self._sink.closed)", "self", ".", "_sink", ".", "close", "(", ")", "if", "self", ".", "_upstream", "and", "not", "self", ".", "_upstream", ".", "readonly", "and", "not", "self", ".", "_upstream", ".", "usreadonly", ":", "self", ".", "_upstream", ".", "put", "(", "self", ".", "_repo_path", ",", "self", ".", "_rel_path", ",", "metadata", "=", "metadata", ")", "def", "__enter__", "(", "self", ")", ":", "# Can be used as a context!", "return", "self", "def", "__exit__", "(", "self", ",", "type_", ",", "value", ",", "traceback", ")", ":", "if", "type_", ":", "return", "False", "self", ".", "close", "(", ")", "self", ".", "put_metadata", "(", "rel_path", ",", "metadata", ")", "return", "flo", "(", "sink", ",", "upstream", ",", "repo_path", ",", "rel_path", ")" ]
Return a file object to write into the cache. The caller is responsible for closing the stream.
[ "return", "a", "file", "object", "to", "write", "into", "the", "cache", ".", "The", "caller", "is", "responsibile", "for", "closing", "the", "stream" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/filesystem.py#L97-L161
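The flo wrapper's job is to run an extra action exactly once when the stream is closed, whether that happens explicitly or by leaving a with block. A generic runnable sketch of that pattern (names here are illustrative, not ckcache API):

from io import IOBase

class ForwardOnClose(IOBase):
    '''Write to a local sink, then fire a callback once on close.'''

    def __init__(self, sink, on_close):
        self._sink = sink
        self._on_close = on_close

    def write(self, data):
        self._sink.write(data)

    def close(self):
        if not self._sink.closed:   # guard makes repeated close() harmless
            self._sink.close()
            self._on_close()

with ForwardOnClose(open('/tmp/demo.bin', 'wb'),
                    lambda: print('forwarding to upstream')) as f:
    f.write(b'payload')
# IOBase.__exit__ calls close(), so the callback fires exactly once.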
240,222
CivicSpleen/ckcache
ckcache/filesystem.py
FsLimitedCache.size
python
def size(self):
    '''Return the size of all of the files referenced in the database'''
    c = self.database.cursor()
    r = c.execute("SELECT sum(size) FROM files")

    try:
        size = int(r.fetchone()[0])
    except TypeError:
        size = 0

    return size
[ "def", "size", "(", "self", ")", ":", "c", "=", "self", ".", "database", ".", "cursor", "(", ")", "r", "=", "c", ".", "execute", "(", "\"SELECT sum(size) FROM files\"", ")", "try", ":", "size", "=", "int", "(", "r", ".", "fetchone", "(", ")", "[", "0", "]", ")", "except", "TypeError", ":", "size", "=", "0", "return", "size" ]
Return the size of all of the files referenced in the database
[ "Return", "the", "size", "of", "all", "of", "the", "files", "referenced", "in", "the", "database" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/filesystem.py#L496-L506
240,223
CivicSpleen/ckcache
ckcache/filesystem.py
FsLimitedCache._free_up_space
python
def _free_up_space(self, size, this_rel_path=None):
    '''If there are not `size` bytes of space left, delete files until
    there are.

    Args:
        size: size of the current file
        this_rel_path: rel_path to the current file, so we don't delete it.
    '''
    # Amount of space we are over (bytes) for the next put
    space = self.size + size - self.maxsize

    if space <= 0:
        return

    removes = []
    for row in self.database.execute(
            "SELECT path, size, time FROM files ORDER BY time ASC"):
        if space > 0:
            removes.append(row[0])
            space -= row[1]
        else:
            break

    for rel_path in removes:
        if rel_path != this_rel_path:
            global_logger.debug("Deleting {}".format(rel_path))
            self.remove(rel_path)
[ "def", "_free_up_space", "(", "self", ",", "size", ",", "this_rel_path", "=", "None", ")", ":", "# Amount of space we are over ( bytes ) for next put", "space", "=", "self", ".", "size", "+", "size", "-", "self", ".", "maxsize", "if", "space", "<=", "0", ":", "return", "removes", "=", "[", "]", "for", "row", "in", "self", ".", "database", ".", "execute", "(", "\"SELECT path, size, time FROM files ORDER BY time ASC\"", ")", ":", "if", "space", ">", "0", ":", "removes", ".", "append", "(", "row", "[", "0", "]", ")", "space", "-=", "row", "[", "1", "]", "else", ":", "break", "for", "rel_path", "in", "removes", ":", "if", "rel_path", "!=", "this_rel_path", ":", "global_logger", ".", "debug", "(", "\"Deleting {}\"", ".", "format", "(", "rel_path", ")", ")", "self", ".", "remove", "(", "rel_path", ")" ]
If there are not size bytes of space left, delete files until there are Args: size: size of the current file this_rel_path: rel_path to the current file, so we don't delete it.
[ "If", "there", "are", "not", "size", "bytes", "of", "space", "left", "delete", "files", "until", "there", "is" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/filesystem.py#L508-L537
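Eviction above is straight LRU on the sqlite files table: rows come back oldest-first, and paths are collected until the byte deficit is covered. The same loop over an in-memory list of (path, size, time) rows:

def pick_evictions(rows, deficit):
    removes = []
    for path, size, _ in sorted(rows, key=lambda r: r[2]):  # oldest first
        if deficit <= 0:
            break
        removes.append(path)
        deficit -= size
    return removes

rows = [('a', 400, 1), ('b', 300, 2), ('c', 200, 3)]
print(pick_evictions(rows, deficit=500))  # ['a', 'b'] -- the two oldest cover 700 bytes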
240,224
CivicSpleen/ckcache
ckcache/filesystem.py
FsLimitedCache.verify
python
def verify(self):
    '''Check that the database accurately describes the state of the
    repository'''
    c = self.database.cursor()
    non_exist = set()
    no_db_entry = set(os.listdir(self.cache_dir))

    try:
        no_db_entry.remove('file_database.db')
        no_db_entry.remove('file_database.db-journal')
    except KeyError:
        pass

    for row in c.execute("SELECT path FROM files"):
        path = row[0]
        repo_path = os.path.join(self.cache_dir, path)
        if os.path.exists(repo_path):
            no_db_entry.remove(path)
        else:
            non_exist.add(path)

    if len(non_exist) > 0:
        raise Exception(
            "Found {} records in db for files that don't exist: {}"
            .format(len(non_exist), ','.join(non_exist)))

    if len(no_db_entry) > 0:
        raise Exception(
            "Found {} files that don't have db entries: {}"
            .format(len(no_db_entry), ','.join(no_db_entry)))
[ "def", "verify", "(", "self", ")", ":", "c", "=", "self", ".", "database", ".", "cursor", "(", ")", "non_exist", "=", "set", "(", ")", "no_db_entry", "=", "set", "(", "os", ".", "listdir", "(", "self", ".", "cache_dir", ")", ")", "try", ":", "no_db_entry", ".", "remove", "(", "'file_database.db'", ")", "no_db_entry", ".", "remove", "(", "'file_database.db-journal'", ")", "except", ":", "pass", "for", "row", "in", "c", ".", "execute", "(", "\"SELECT path FROM files\"", ")", ":", "path", "=", "row", "[", "0", "]", "repo_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cache_dir", ",", "path", ")", "if", "os", ".", "path", ".", "exists", "(", "repo_path", ")", ":", "no_db_entry", ".", "remove", "(", "path", ")", "else", ":", "non_exist", ".", "add", "(", "path", ")", "if", "len", "(", "non_exist", ")", ">", "0", ":", "raise", "Exception", "(", "\"Found {} records in db for files that don't exist: {}\"", ".", "format", "(", "len", "(", "non_exist", ")", ",", "','", ".", "join", "(", "non_exist", ")", ")", ")", "if", "len", "(", "no_db_entry", ")", ">", "0", ":", "raise", "Exception", "(", "\"Found {} files that don't have db entries: {}\"", ".", "format", "(", "len", "(", "no_db_entry", ")", ",", "','", ".", "join", "(", "no_db_entry", ")", ")", ")" ]
Check that the database accurately describes the state of the repository
[ "Check", "that", "the", "database", "accurately", "describes", "the", "state", "of", "the", "repository" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/filesystem.py#L552-L583
240,225
CivicSpleen/ckcache
ckcache/filesystem.py
FsLimitedCache.put_stream
python
def put_stream(self, rel_path, metadata=None, cb=None):
    """Return a file object to write into the cache. The caller is
    responsible for closing the stream. Bad things happen if you don't
    close the stream.
    """
    class flo:

        def __init__(self, this, sink, upstream, repo_path):
            self.this = this
            self.sink = sink
            self.upstream = upstream
            # Stored under a private name; the original assigned to
            # self.repo_path, which the property below would shadow.
            self._repo_path = repo_path

        @property
        def repo_path(self):
            return self._repo_path

        def write(self, d):
            self.sink.write(d)
            if self.upstream:
                self.upstream.write(d)

        def writelines(self, lines):
            raise NotImplementedError()

        def close(self):
            self.sink.close()
            size = os.path.getsize(self.repo_path)
            self.this.add_record(rel_path, size)
            self.this._free_up_space(size, this_rel_path=rel_path)
            if self.upstream:
                self.upstream.close()

        def __enter__(self):
            # Can be used as a context manager.
            return self

        def __exit__(self, type_, value, traceback):
            if type_:
                return False

    if not isinstance(rel_path, basestring):
        rel_path = rel_path.cache_key

    repo_path = os.path.join(self.cache_dir, rel_path.strip('/'))

    if not os.path.isdir(os.path.dirname(repo_path)):
        os.makedirs(os.path.dirname(repo_path))

    self.put_metadata(rel_path, metadata=metadata)

    sink = open(repo_path, 'w+')

    upstream = self.upstream.put_stream(
        rel_path, metadata=metadata) if self.upstream else None

    return flo(self, sink, upstream, repo_path)
[ "def", "put_stream", "(", "self", ",", "rel_path", ",", "metadata", "=", "None", ",", "cb", "=", "None", ")", ":", "class", "flo", ":", "def", "__init__", "(", "self", ",", "this", ",", "sink", ",", "upstream", ",", "repo_path", ")", ":", "self", ".", "this", "=", "this", "self", ".", "sink", "=", "sink", "self", ".", "upstream", "=", "upstream", "self", ".", "repo_path", "=", "repo_path", "@", "property", "def", "repo_path", "(", "self", ")", ":", "return", "self", ".", "repo_path", "def", "write", "(", "self", ",", "d", ")", ":", "self", ".", "sink", ".", "write", "(", "d", ")", "if", "self", ".", "upstream", ":", "self", ".", "upstream", ".", "write", "(", "d", ")", "def", "writelines", "(", "self", ",", "lines", ")", ":", "raise", "NotImplemented", "(", ")", "def", "close", "(", "self", ")", ":", "self", ".", "sink", ".", "close", "(", ")", "size", "=", "os", ".", "path", ".", "getsize", "(", "self", ".", "repo_path", ")", "self", ".", "this", ".", "add_record", "(", "rel_path", ",", "size", ")", "self", ".", "this", ".", "_free_up_space", "(", "size", ",", "this_rel_path", "=", "rel_path", ")", "if", "self", ".", "upstream", ":", "self", ".", "upstream", ".", "close", "(", ")", "def", "__enter__", "(", "self", ")", ":", "# Can be used as a context!", "return", "self", "def", "__exit__", "(", "self", ",", "type_", ",", "value", ",", "traceback", ")", ":", "if", "type_", ":", "return", "False", "if", "not", "isinstance", "(", "rel_path", ",", "basestring", ")", ":", "rel_path", "=", "rel_path", ".", "cache_key", "repo_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cache_dir", ",", "rel_path", ".", "strip", "(", "'/'", ")", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "dirname", "(", "repo_path", ")", ")", ":", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "repo_path", ")", ")", "self", ".", "put_metadata", "(", "rel_path", ",", "metadata", "=", "metadata", ")", "sink", "=", "open", "(", "repo_path", ",", "'w+'", ")", "upstream", "=", "self", ".", "upstream", ".", "put_stream", "(", "rel_path", ",", "metadata", "=", "metadata", ")", "if", "self", ".", "upstream", "else", "None", "return", "flo", "(", "self", ",", "sink", ",", "upstream", ",", "repo_path", ")" ]
Return a file object to write into the cache. The caller is responsible for closing the stream. Bad things happen if you don't close the stream.
[ "return", "a", "file", "object", "to", "write", "into", "the", "cache", ".", "The", "caller", "is", "responsibile", "for", "closing", "the", "stream", ".", "Bad", "things", "happen", "if", "you", "dont", "close", "the", "stream" ]
0c699b6ba97ff164e9702504f0e1643dd4cd39e1
https://github.com/CivicSpleen/ckcache/blob/0c699b6ba97ff164e9702504f0e1643dd4cd39e1/ckcache/filesystem.py#L687-L747
240,226
waltermoreira/tartpy
tartpy/membrane.py
MembraneFactory.create_proxy
python
def create_proxy(self, this, message):
    """Create proxy for an actor.

    `message` has the form::

        {'tag': 'create_proxy',
         'actor': ...,
         'customer': ...}

    """
    actor = message['actor']
    proxy = self._create_proxy(this, actor)
    message['customer'] << proxy
[ "def", "create_proxy", "(", "self", ",", "this", ",", "message", ")", ":", "actor", "=", "message", "[", "'actor'", "]", "proxy", "=", "self", ".", "_create_proxy", "(", "this", ",", "actor", ")", "message", "[", "'customer'", "]", "<<", "proxy" ]
Create proxy for an actor. `message` has the form:: {'tag': 'create_proxy', 'actor': ..., 'customer': ... }
[ "Create", "proxy", "for", "an", "actor", "." ]
d9f66c8b373bd55a7b055c0fd39b516490bb0235
https://github.com/waltermoreira/tartpy/blob/d9f66c8b373bd55a7b055c0fd39b516490bb0235/tartpy/membrane.py#L47-L60
240,227
jalanb/pysyte
pysyte/dictionaries.py
get_caselessly
python
def get_caselessly(dictionary, sought):
    """Find the sought key in the given dictionary regardless of case

    >>> things = {'Fred': 9}
    >>> print(get_caselessly(things, 'fred'))
    9
    """
    try:
        return dictionary[sought]
    except KeyError:
        caseless_keys = {k.lower(): k for k in dictionary.keys()}
        real_key = caseless_keys[sought.lower()]  # allow any KeyError here
        return dictionary[real_key]
[ "def", "get_caselessly", "(", "dictionary", ",", "sought", ")", ":", "try", ":", "return", "dictionary", "[", "sought", "]", "except", "KeyError", ":", "caseless_keys", "=", "{", "k", ".", "lower", "(", ")", ":", "k", "for", "k", "in", "dictionary", ".", "keys", "(", ")", "}", "real_key", "=", "caseless_keys", "[", "sought", ".", "lower", "(", ")", "]", "# allow any KeyError here", "return", "dictionary", "[", "real_key", "]" ]
Find the sought key in the given dictionary regardless of case >>> things = {'Fred' : 9} >>> print(get_caselessly(things, 'fred')) 9
[ "Find", "the", "sought", "key", "in", "the", "given", "dictionary", "regardless", "of", "case" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/dictionaries.py#L8-L20
240,228
jalanb/pysyte
pysyte/dictionaries.py
append_value
python
def append_value(dictionary, key, item):
    """Append the item to the list of values for that key"""
    items = dictionary.get(key, [])
    items.append(item)
    dictionary[key] = items
[ "def", "append_value", "(", "dictionary", ",", "key", ",", "item", ")", ":", "items", "=", "dictionary", ".", "get", "(", "key", ",", "[", "]", ")", "items", ".", "append", "(", "item", ")", "dictionary", "[", "key", "]", "=", "items" ]
Append the item to the list of values for that key
[ "Append", "those", "items", "to", "the", "values", "for", "that", "key" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/dictionaries.py#L34-L38
240,229
jalanb/pysyte
pysyte/dictionaries.py
extend_values
python
def extend_values(dictionary, key, items):
    """Extend the values for that key with the items"""
    values = dictionary.get(key, [])
    try:
        values.extend(items)
    except TypeError:
        raise TypeError('Expected a list, got: %r' % items)
    dictionary[key] = values
[ "def", "extend_values", "(", "dictionary", ",", "key", ",", "items", ")", ":", "values", "=", "dictionary", ".", "get", "(", "key", ",", "[", "]", ")", "try", ":", "values", ".", "extend", "(", "items", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "'Expected a list, got: %r'", "%", "items", ")", "dictionary", "[", "key", "]", "=", "values" ]
Extend the values for that key with the items
[ "Extend", "the", "values", "for", "that", "key", "with", "the", "items" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/dictionaries.py#L41-L48
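
The two helpers above differ in how the incoming value lands in the list; a quick sketch of the contrast (assuming both functions are in scope):

d = {}
append_value(d, 'k', [1, 2])    # the whole list becomes one element
assert d['k'] == [[1, 2]]

d = {}
extend_values(d, 'k', [1, 2])   # the items are spliced in individually
extend_values(d, 'k', [3])
assert d['k'] == [1, 2, 3]
try:
    extend_values(d, 'k', 4)    # non-iterables trigger the re-raised TypeError
except TypeError:
    pass
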
240,230
diffeo/rejester
rejester/workers.py
MultiWorker.run
def run(self): '''Fetch and dispatch jobs as long as the system is running. This periodically checks the :class:`rejester.TaskMaster` mode and asks it for more work. It will normally run forever in a loop until the mode becomes :attr:`~rejester.TaskMaster.TERMINATE`, at which point it waits for all outstanding jobs to finish and exits. This will :func:`~rejester.Worker.heartbeat` and check for new work whenever a job finishes, or otherwise on a random interval between 1 and 5 seconds. ''' tm = self.task_master num_workers = multiprocessing.cpu_count() if 'tasks_per_cpu' in self.config: num_workers *= self.config.get('tasks_per_cpu') or 1 if self.pool is None: self.pool = multiprocessing.Pool(num_workers, maxtasksperchild=1) ## slots is a fixed-length list of [AsyncResults, WorkUnit] slots = [[None, None]] * num_workers logger.info('MultiWorker starting with %s workers', num_workers) min_loop_time = 2.0 lastFullPoll = time.time() while True: mode = self.heartbeat() if mode != self._mode: logger.info('worker {0} changed to mode {1}' .format(self.worker_id, mode)) self._mode = mode now = time.time() should_update = (now - lastFullPoll) > min_loop_time self._poll_slots(slots, mode=mode, do_update=should_update) if should_update: lastFullPoll = now if mode == tm.TERMINATE: num_waiting = sum(map(int, map(bool, map(itemgetter(0), slots)))) if num_waiting == 0: logger.info('MultiWorker all children finished') break else: logger.info('MultiWorker waiting for %d children to finish', num_waiting) sleepsecs = random.uniform(1,5) sleepstart = time.time() try: self._event_queue.get(block=True, timeout=sleepsecs) logger.debug('woken by event looptime=%s sleeptime=%s', sleepstart - now, time.time() - sleepstart) except Queue.Empty, empty: logger.debug('queue timed out. be exhausting, looptime=%s sleeptime=%s', sleepstart - now, time.time() - sleepstart) # it's cool, timed out, do the loop of checks and stuff. logger.info('MultiWorker exiting')
python
def run(self): '''Fetch and dispatch jobs as long as the system is running. This periodically checks the :class:`rejester.TaskMaster` mode and asks it for more work. It will normally run forever in a loop until the mode becomes :attr:`~rejester.TaskMaster.TERMINATE`, at which point it waits for all outstanding jobs to finish and exits. This will :func:`~rejester.Worker.heartbeat` and check for new work whenever a job finishes, or otherwise on a random interval between 1 and 5 seconds. ''' tm = self.task_master num_workers = multiprocessing.cpu_count() if 'tasks_per_cpu' in self.config: num_workers *= self.config.get('tasks_per_cpu') or 1 if self.pool is None: self.pool = multiprocessing.Pool(num_workers, maxtasksperchild=1) ## slots is a fixed-length list of [AsyncResults, WorkUnit] slots = [[None, None]] * num_workers logger.info('MultiWorker starting with %s workers', num_workers) min_loop_time = 2.0 lastFullPoll = time.time() while True: mode = self.heartbeat() if mode != self._mode: logger.info('worker {0} changed to mode {1}' .format(self.worker_id, mode)) self._mode = mode now = time.time() should_update = (now - lastFullPoll) > min_loop_time self._poll_slots(slots, mode=mode, do_update=should_update) if should_update: lastFullPoll = now if mode == tm.TERMINATE: num_waiting = sum(map(int, map(bool, map(itemgetter(0), slots)))) if num_waiting == 0: logger.info('MultiWorker all children finished') break else: logger.info('MultiWorker waiting for %d children to finish', num_waiting) sleepsecs = random.uniform(1,5) sleepstart = time.time() try: self._event_queue.get(block=True, timeout=sleepsecs) logger.debug('woken by event looptime=%s sleeptime=%s', sleepstart - now, time.time() - sleepstart) except Queue.Empty, empty: logger.debug('queue timed out. be exhausting, looptime=%s sleeptime=%s', sleepstart - now, time.time() - sleepstart) # it's cool, timed out, do the loop of checks and stuff. logger.info('MultiWorker exiting')
[ "def", "run", "(", "self", ")", ":", "tm", "=", "self", ".", "task_master", "num_workers", "=", "multiprocessing", ".", "cpu_count", "(", ")", "if", "'tasks_per_cpu'", "in", "self", ".", "config", ":", "num_workers", "*=", "self", ".", "config", ".", "get", "(", "'tasks_per_cpu'", ")", "or", "1", "if", "self", ".", "pool", "is", "None", ":", "self", ".", "pool", "=", "multiprocessing", ".", "Pool", "(", "num_workers", ",", "maxtasksperchild", "=", "1", ")", "## slots is a fixed-length list of [AsyncRsults, WorkUnit]", "slots", "=", "[", "[", "None", ",", "None", "]", "]", "*", "num_workers", "logger", ".", "info", "(", "'MultiWorker starting with %s workers'", ",", "num_workers", ")", "min_loop_time", "=", "2.0", "lastFullPoll", "=", "time", ".", "time", "(", ")", "while", "True", ":", "mode", "=", "self", ".", "heartbeat", "(", ")", "if", "mode", "!=", "self", ".", "_mode", ":", "logger", ".", "info", "(", "'worker {0} changed to mode {1}'", ".", "format", "(", "self", ".", "worker_id", ",", "mode", ")", ")", "self", ".", "_mode", "=", "mode", "now", "=", "time", ".", "time", "(", ")", "should_update", "=", "(", "now", "-", "lastFullPoll", ")", ">", "min_loop_time", "self", ".", "_poll_slots", "(", "slots", ",", "mode", "=", "mode", ",", "do_update", "=", "should_update", ")", "if", "should_update", ":", "lastFullPoll", "=", "now", "if", "mode", "==", "tm", ".", "TERMINATE", ":", "num_waiting", "=", "sum", "(", "map", "(", "int", ",", "map", "(", "bool", ",", "map", "(", "itemgetter", "(", "0", ")", ",", "slots", ")", ")", ")", ")", "if", "num_waiting", "==", "0", ":", "logger", ".", "info", "(", "'MultiWorker all children finished'", ")", "break", "else", ":", "logger", ".", "info", "(", "'MultiWorker waiting for %d children to finish'", ",", "num_waiting", ")", "sleepsecs", "=", "random", ".", "uniform", "(", "1", ",", "5", ")", "sleepstart", "=", "time", ".", "time", "(", ")", "try", ":", "self", ".", "_event_queue", ".", "get", "(", "block", "=", "True", ",", "timeout", "=", "sleepsecs", ")", "logger", ".", "debug", "(", "'woken by event looptime=%s sleeptime=%s'", ",", "sleepstart", "-", "now", ",", "time", ".", "time", "(", ")", "-", "sleepstart", ")", "except", "Queue", ".", "Empty", ",", "empty", ":", "logger", ".", "debug", "(", "'queue timed out. be exhausting, looptime=%s sleeptime=%s'", ",", "sleepstart", "-", "now", ",", "time", ".", "time", "(", ")", "-", "sleepstart", ")", "# it's cool, timed out, do the loop of checks and stuff.", "logger", ".", "info", "(", "'MultiWorker exiting'", ")" ]
Fetch and dispatch jobs as long as the system is running. This periodically checks the :class:`rejester.TaskMaster` mode and asks it for more work. It will normally run forever in a loop until the mode becomes :attr:`~rejester.TaskMaster.TERMINATE`, at which point it waits for all outstanding jobs to finish and exits. This will :func:`~rejester.Worker.heartbeat` and check for new work whenever a job finishes, or otherwise on a random interval between 1 and 5 seconds.
[ "Fetch", "and", "dispatch", "jobs", "as", "long", "as", "the", "system", "is", "running", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L291-L346
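
One detail in `MultiWorker.run` deserves a flag: `slots = [[None, None]] * num_workers` replicates a reference to a single inner list, so a mutation through one slot is visible through all of them. Whether that bites here depends on how `_poll_slots` assigns into the slots (not shown), but the pitfall itself is easy to demonstrate, along with the usual fix:

slots = [[None, None]] * 4                 # four references to ONE inner list
slots[0][0] = 'busy'
assert slots[3][0] == 'busy'               # the write shows up in every slot

slots = [[None, None] for _ in range(4)]   # independent inner list per slot
slots[0][0] = 'busy'
assert slots[3][0] is None
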
240,231
diffeo/rejester
rejester/workers.py
SingleWorker.run_one
def run_one(self, set_title=False): '''Get exactly one job, run it, and return. Does nothing (but returns :const:`False`) if there is no work to do. Ignores the global mode; this will do work even if :func:`rejester.TaskMaster.get_mode` returns :attr:`~rejester.TaskMaster.TERMINATE`. :param set_title: if true, set the process's title with the work unit name :return: :const:`True` if there was a job (even if it failed) ''' available_gb = MultiWorker.available_gb() unit = self.task_master.get_work(self.worker_id, available_gb, work_spec_names=self.work_spec_names, max_jobs=self.max_jobs) if not unit: logger.info('No work to do; stopping.') return False if isinstance(unit, (list, tuple)): ok = True for xunit in unit: if not ok: try: xunit.update(-1) except LostLease as e: pass except Exception as bad: # we're already quitting everything, but this is weirdly bad. logger.error('failed to release lease on %r %r', xunit.work_spec_name, xunit.key, exc_info=True) else: ok = self._run_unit(xunit, set_title) return ok return self._run_unit(unit)
python
def run_one(self, set_title=False): '''Get exactly one job, run it, and return. Does nothing (but returns :const:`False`) if there is no work to do. Ignores the global mode; this will do work even if :func:`rejester.TaskMaster.get_mode` returns :attr:`~rejester.TaskMaster.TERMINATE`. :param set_title: if true, set the process's title with the work unit name :return: :const:`True` if there was a job (even if it failed) ''' available_gb = MultiWorker.available_gb() unit = self.task_master.get_work(self.worker_id, available_gb, work_spec_names=self.work_spec_names, max_jobs=self.max_jobs) if not unit: logger.info('No work to do; stopping.') return False if isinstance(unit, (list, tuple)): ok = True for xunit in unit: if not ok: try: xunit.update(-1) except LostLease as e: pass except Exception as bad: # we're already quitting everything, but this is weirdly bad. logger.error('failed to release lease on %r %r', xunit.work_spec_name, xunit.key, exc_info=True) else: ok = self._run_unit(xunit, set_title) return ok return self._run_unit(unit)
[ "def", "run_one", "(", "self", ",", "set_title", "=", "False", ")", ":", "available_gb", "=", "MultiWorker", ".", "available_gb", "(", ")", "unit", "=", "self", ".", "task_master", ".", "get_work", "(", "self", ".", "worker_id", ",", "available_gb", ",", "work_spec_names", "=", "self", ".", "work_spec_names", ",", "max_jobs", "=", "self", ".", "max_jobs", ")", "if", "not", "unit", ":", "logger", ".", "info", "(", "'No work to do; stopping.'", ")", "return", "False", "if", "isinstance", "(", "unit", ",", "(", "list", ",", "tuple", ")", ")", ":", "ok", "=", "True", "for", "xunit", "in", "unit", ":", "if", "not", "ok", ":", "try", ":", "xunit", ".", "update", "(", "-", "1", ")", "except", "LostLease", "as", "e", ":", "pass", "except", "Exception", "as", "bad", ":", "# we're already quitting everything, but this is weirdly bad.", "logger", ".", "error", "(", "'failed to release lease on %r %r'", ",", "xunit", ".", "work_spec_name", ",", "xunit", ".", "key", ",", "exc_info", "=", "True", ")", "else", ":", "ok", "=", "self", ".", "_run_unit", "(", "xunit", ",", "set_title", ")", "return", "ok", "return", "self", ".", "_run_unit", "(", "unit", ")" ]
Get exactly one job, run it, and return. Does nothing (but returns :const:`False`) if there is no work to do. Ignores the global mode; this will do work even if :func:`rejester.TaskMaster.get_mode` returns :attr:`~rejester.TaskMaster.TERMINATE`. :param set_title: if true, set the process's title with the work unit name :return: :const:`True` if there was a job (even if it failed)
[ "Get", "exactly", "one", "job", "run", "it", "and", "return", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L375-L407
240,232
diffeo/rejester
rejester/workers.py
SingleWorker.as_child
def as_child(cls, global_config, parent=None): '''Run a single job in a child process. This method never returns; it always calls :func:`sys.exit` with an error code that says what it did. ''' try: setproctitle('rejester worker') random.seed() # otherwise everyone inherits the same seed yakonfig.set_default_config([yakonfig, dblogger, rejester], config=global_config) worker = cls(yakonfig.get_global_config(rejester.config_name)) worker.register(parent=parent) did_work = worker.run(set_title=True) worker.unregister() if did_work: sys.exit(cls.EXIT_SUCCESS) else: sys.exit(cls.EXIT_BORED) except Exception, e: # There's some off chance we have logging. # You will be here if redis is down, for instance, # and the yakonfig dblogger setup runs but then # the get_work call fails with an exception. if len(logging.root.handlers) > 0: logger.critical('failed to do any work', exc_info=e) sys.exit(cls.EXIT_EXCEPTION)
python
def as_child(cls, global_config, parent=None): '''Run a single job in a child process. This method never returns; it always calls :func:`sys.exit` with an error code that says what it did. ''' try: setproctitle('rejester worker') random.seed() # otherwise everyone inherits the same seed yakonfig.set_default_config([yakonfig, dblogger, rejester], config=global_config) worker = cls(yakonfig.get_global_config(rejester.config_name)) worker.register(parent=parent) did_work = worker.run(set_title=True) worker.unregister() if did_work: sys.exit(cls.EXIT_SUCCESS) else: sys.exit(cls.EXIT_BORED) except Exception, e: # There's some off chance we have logging. # You will be here if redis is down, for instance, # and the yakonfig dblogger setup runs but then # the get_work call fails with an exception. if len(logging.root.handlers) > 0: logger.critical('failed to do any work', exc_info=e) sys.exit(cls.EXIT_EXCEPTION)
[ "def", "as_child", "(", "cls", ",", "global_config", ",", "parent", "=", "None", ")", ":", "try", ":", "setproctitle", "(", "'rejester worker'", ")", "random", ".", "seed", "(", ")", "# otherwise everyone inherits the same seed", "yakonfig", ".", "set_default_config", "(", "[", "yakonfig", ",", "dblogger", ",", "rejester", "]", ",", "config", "=", "global_config", ")", "worker", "=", "cls", "(", "yakonfig", ".", "get_global_config", "(", "rejester", ".", "config_name", ")", ")", "worker", ".", "register", "(", "parent", "=", "parent", ")", "did_work", "=", "worker", ".", "run", "(", "set_title", "=", "True", ")", "worker", ".", "unregister", "(", ")", "if", "did_work", ":", "sys", ".", "exit", "(", "cls", ".", "EXIT_SUCCESS", ")", "else", ":", "sys", ".", "exit", "(", "cls", ".", "EXIT_BORED", ")", "except", "Exception", ",", "e", ":", "# There's some off chance we have logging.", "# You will be here if redis is down, for instance,", "# and the yakonfig dblogger setup runs but then", "# the get_work call fails with an exception.", "if", "len", "(", "logging", ".", "root", ".", "handlers", ")", ">", "0", ":", "logger", ".", "critical", "(", "'failed to do any work'", ",", "exc_info", "=", "e", ")", "sys", ".", "exit", "(", "cls", ".", "EXIT_EXCEPTION", ")" ]
Run a single job in a child process. This method never returns; it always calls :func:`sys.exit` with an error code that says what it did.
[ "Run", "a", "single", "job", "in", "a", "child", "process", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L433-L460
240,233
diffeo/rejester
rejester/workers.py
ForkWorker.set_signal_handlers
def set_signal_handlers(self): '''Set some signal handlers. These react reasonably to shutdown requests, and keep the logging child alive. ''' def handler(f): def wrapper(signum, backtrace): return f() return wrapper self.old_sigabrt = signal.signal(signal.SIGABRT, handler(self.scram)) self.old_sigint = signal.signal(signal.SIGINT, handler(self.stop_gracefully)) self.old_sigpipe = signal.signal(signal.SIGPIPE, handler(self.live_log_child)) signal.siginterrupt(signal.SIGPIPE, False) self.old_sigterm = signal.signal(signal.SIGTERM, handler(self.stop_gracefully))
python
def set_signal_handlers(self): '''Set some signal handlers. These react reasonably to shutdown requests, and keep the logging child alive. ''' def handler(f): def wrapper(signum, backtrace): return f() return wrapper self.old_sigabrt = signal.signal(signal.SIGABRT, handler(self.scram)) self.old_sigint = signal.signal(signal.SIGINT, handler(self.stop_gracefully)) self.old_sigpipe = signal.signal(signal.SIGPIPE, handler(self.live_log_child)) signal.siginterrupt(signal.SIGPIPE, False) self.old_sigterm = signal.signal(signal.SIGTERM, handler(self.stop_gracefully))
[ "def", "set_signal_handlers", "(", "self", ")", ":", "def", "handler", "(", "f", ")", ":", "def", "wrapper", "(", "signum", ",", "backtrace", ")", ":", "return", "f", "(", ")", "return", "wrapper", "self", ".", "old_sigabrt", "=", "signal", ".", "signal", "(", "signal", ".", "SIGABRT", ",", "handler", "(", "self", ".", "scram", ")", ")", "self", ".", "old_sigint", "=", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "handler", "(", "self", ".", "stop_gracefully", ")", ")", "self", ".", "old_sigpipe", "=", "signal", ".", "signal", "(", "signal", ".", "SIGPIPE", ",", "handler", "(", "self", ".", "live_log_child", ")", ")", "signal", ".", "siginterrupt", "(", "signal", ".", "SIGPIPE", ",", "False", ")", "self", ".", "old_sigterm", "=", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "handler", "(", "self", ".", "stop_gracefully", ")", ")" ]
Set some signal handlers. These react reasonably to shutdown requests, and keep the logging child alive.
[ "Set", "some", "signal", "handlers", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L668-L688
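
The nested `handler(f)` closure above adapts a zero-argument method to the `(signum, frame)` signature that `signal.signal` requires. A self-contained, POSIX-only sketch of the same adapter, using SIGUSR1 so it can be run harmlessly:

import os
import signal

def handler(f):
    def wrapper(signum, frame):   # signal handlers must take (signum, frame)
        return f()
    return wrapper

state = {'stopping': False}
def stop_gracefully():
    state['stopping'] = True

old = signal.signal(signal.SIGUSR1, handler(stop_gracefully))
os.kill(os.getpid(), signal.SIGUSR1)   # deliver the signal to this process
assert state['stopping']
signal.signal(signal.SIGUSR1, old)     # restore the previous handler
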
240,234
diffeo/rejester
rejester/workers.py
ForkWorker.log
def log(self, level, message): '''Write a log message via the child process. The child process must already exist; call :meth:`live_log_child` to make sure. If it has died in a way we don't expect then this will raise :const:`signal.SIGPIPE`. ''' if self.log_fd is not None: prefix = struct.pack('ii', level, len(message)) os.write(self.log_fd, prefix) os.write(self.log_fd, message)
python
def log(self, level, message): '''Write a log message via the child process. The child process must already exist; call :meth:`live_log_child` to make sure. If it has died in a way we don't expect then this will raise :const:`signal.SIGPIPE`. ''' if self.log_fd is not None: prefix = struct.pack('ii', level, len(message)) os.write(self.log_fd, prefix) os.write(self.log_fd, message)
[ "def", "log", "(", "self", ",", "level", ",", "message", ")", ":", "if", "self", ".", "log_fd", "is", "not", "None", ":", "prefix", "=", "struct", ".", "pack", "(", "'ii'", ",", "level", ",", "len", "(", "message", ")", ")", "os", ".", "write", "(", "self", ".", "log_fd", ",", "prefix", ")", "os", ".", "write", "(", "self", ".", "log_fd", ",", "message", ")" ]
Write a log message via the child process. The child process must already exist; call :meth:`live_log_child` to make sure. If it has died in a way we don't expect then this will raise :const:`signal.SIGPIPE`.
[ "Write", "a", "log", "message", "via", "the", "child", "process", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L702-L713
240,235
diffeo/rejester
rejester/workers.py
ForkWorker.debug
def debug(self, group, message): '''Maybe write a debug-level log message. In particular, this gets written if the hidden `debug_worker` option contains `group`. ''' if group in self.debug_worker: if 'stdout' in self.debug_worker: print message self.log(logging.DEBUG, message)
python
def debug(self, group, message): '''Maybe write a debug-level log message. In particular, this gets written if the hidden `debug_worker` option contains `group`. ''' if group in self.debug_worker: if 'stdout' in self.debug_worker: print message self.log(logging.DEBUG, message)
[ "def", "debug", "(", "self", ",", "group", ",", "message", ")", ":", "if", "group", "in", "self", ".", "debug_worker", ":", "if", "'stdout'", "in", "self", ".", "debug_worker", ":", "print", "message", "self", ".", "log", "(", "logging", ".", "DEBUG", ",", "message", ")" ]
Maybe write a debug-level log message. In particular, this gets written if the hidden `debug_worker` option contains `group`.
[ "Maybe", "write", "a", "debug", "-", "level", "log", "message", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L715-L725
240,236
diffeo/rejester
rejester/workers.py
ForkWorker.log_spewer
def log_spewer(self, gconfig, fd): '''Child process to manage logging. This reads pairs of lines from `fd`, which are alternating priority (Python integer) and message (unformatted string). ''' setproctitle('rejester fork_worker log task') yakonfig.set_default_config([yakonfig, dblogger], config=gconfig) try: while True: prefix = os.read(fd, struct.calcsize('ii')) level, msglen = struct.unpack('ii', prefix) msg = os.read(fd, msglen) logger.log(level, msg) except Exception, e: logger.critical('log writer failed', exc_info=e) raise
python
def log_spewer(self, gconfig, fd): '''Child process to manage logging. This reads pairs of lines from `fd`, which are alternating priority (Python integer) and message (unformatted string). ''' setproctitle('rejester fork_worker log task') yakonfig.set_default_config([yakonfig, dblogger], config=gconfig) try: while True: prefix = os.read(fd, struct.calcsize('ii')) level, msglen = struct.unpack('ii', prefix) msg = os.read(fd, msglen) logger.log(level, msg) except Exception, e: logger.critical('log writer failed', exc_info=e) raise
[ "def", "log_spewer", "(", "self", ",", "gconfig", ",", "fd", ")", ":", "setproctitle", "(", "'rejester fork_worker log task'", ")", "yakonfig", ".", "set_default_config", "(", "[", "yakonfig", ",", "dblogger", "]", ",", "config", "=", "gconfig", ")", "try", ":", "while", "True", ":", "prefix", "=", "os", ".", "read", "(", "fd", ",", "struct", ".", "calcsize", "(", "'ii'", ")", ")", "level", ",", "msglen", "=", "struct", ".", "unpack", "(", "'ii'", ",", "prefix", ")", "msg", "=", "os", ".", "read", "(", "fd", ",", "msglen", ")", "logger", ".", "log", "(", "level", ",", "msg", ")", "except", "Exception", ",", "e", ":", "logger", ".", "critical", "(", "'log writer failed'", ",", "exc_info", "=", "e", ")", "raise" ]
Child process to manage logging. This reads pairs of lines from `fd`, which are alternating priority (Python integer) and message (unformatted string).
[ "Child", "process", "to", "manage", "logging", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L727-L744
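
Although the `log_spewer` docstring describes "pairs of lines", the implementation (paired with `ForkWorker.log` above) actually speaks a binary length-prefixed protocol: an 8-byte `struct('ii')` header carrying the level and message length, followed by the message bytes. A self-contained Python 3 sketch of the same framing over a pipe, without the fork:

import os
import struct

HEADER = struct.Struct('ii')        # (log level, message length)
read_end, write_end = os.pipe()

def write_frame(fd, level, message):
    data = message.encode('utf-8')
    os.write(fd, HEADER.pack(level, len(data)))
    os.write(fd, data)

def read_frame(fd):
    level, msglen = HEADER.unpack(os.read(fd, HEADER.size))
    # Like the original, this assumes the whole frame arrives in one read,
    # which holds for small writes into a pipe.
    return level, os.read(fd, msglen).decode('utf-8')

write_frame(write_end, 20, 'worker 1234 exited')   # 20 == logging.INFO
assert read_frame(read_end) == (20, 'worker 1234 exited')
os.close(read_end); os.close(write_end)
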
240,237
diffeo/rejester
rejester/workers.py
ForkWorker.start_log_child
def start_log_child(self): '''Start the logging child process.''' self.stop_log_child() gconfig = yakonfig.get_global_config() read_end, write_end = os.pipe() pid = os.fork() if pid == 0: # We are the child self.clear_signal_handlers() os.close(write_end) yakonfig.clear_global_config() self.log_spewer(gconfig, read_end) sys.exit(0) else: # We are the parent self.debug('children', 'new log child with pid {0}'.format(pid)) self.log_child = pid os.close(read_end) self.log_fd = write_end
python
def start_log_child(self): '''Start the logging child process.''' self.stop_log_child() gconfig = yakonfig.get_global_config() read_end, write_end = os.pipe() pid = os.fork() if pid == 0: # We are the child self.clear_signal_handlers() os.close(write_end) yakonfig.clear_global_config() self.log_spewer(gconfig, read_end) sys.exit(0) else: # We are the parent self.debug('children', 'new log child with pid {0}'.format(pid)) self.log_child = pid os.close(read_end) self.log_fd = write_end
[ "def", "start_log_child", "(", "self", ")", ":", "self", ".", "stop_log_child", "(", ")", "gconfig", "=", "yakonfig", ".", "get_global_config", "(", ")", "read_end", ",", "write_end", "=", "os", ".", "pipe", "(", ")", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", "==", "0", ":", "# We are the child", "self", ".", "clear_signal_handlers", "(", ")", "os", ".", "close", "(", "write_end", ")", "yakonfig", ".", "clear_global_config", "(", ")", "self", ".", "log_spewer", "(", "gconfig", ",", "read_end", ")", "sys", ".", "exit", "(", "0", ")", "else", ":", "# We are the parent", "self", ".", "debug", "(", "'children'", ",", "'new log child with pid {0}'", ".", "format", "(", "pid", ")", ")", "self", ".", "log_child", "=", "pid", "os", ".", "close", "(", "read_end", ")", "self", ".", "log_fd", "=", "write_end" ]
Start the logging child process.
[ "Start", "the", "logging", "child", "process", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L746-L764
240,238
diffeo/rejester
rejester/workers.py
ForkWorker.stop_log_child
def stop_log_child(self): '''Stop the logging child process.''' if self.log_fd: os.close(self.log_fd) self.log_fd = None if self.log_child: try: self.debug('children', 'stopping log child with pid {0}' .format(self.log_child)) os.kill(self.log_child, signal.SIGTERM) os.waitpid(self.log_child, 0) except OSError, e: if e.errno == errno.ESRCH or e.errno == errno.ECHILD: # already gone pass else: raise self.log_child = None
python
def stop_log_child(self): '''Stop the logging child process.''' if self.log_fd: os.close(self.log_fd) self.log_fd = None if self.log_child: try: self.debug('children', 'stopping log child with pid {0}' .format(self.log_child)) os.kill(self.log_child, signal.SIGTERM) os.waitpid(self.log_child, 0) except OSError, e: if e.errno == errno.ESRCH or e.errno == errno.ECHILD: # already gone pass else: raise self.log_child = None
[ "def", "stop_log_child", "(", "self", ")", ":", "if", "self", ".", "log_fd", ":", "os", ".", "close", "(", "self", ".", "log_fd", ")", "self", ".", "log_fd", "=", "None", "if", "self", ".", "log_child", ":", "try", ":", "self", ".", "debug", "(", "'children'", ",", "'stopping log child with pid {0}'", ".", "format", "(", "self", ".", "log_child", ")", ")", "os", ".", "kill", "(", "self", ".", "log_child", ",", "signal", ".", "SIGTERM", ")", "os", ".", "waitpid", "(", "self", ".", "log_child", ",", "0", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ESRCH", "or", "e", ".", "errno", "==", "errno", ".", "ECHILD", ":", "# already gone", "pass", "else", ":", "raise", "self", ".", "log_child", "=", "None" ]
Stop the logging child process.
[ "Stop", "the", "logging", "child", "process", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L766-L783
240,239
diffeo/rejester
rejester/workers.py
ForkWorker.live_log_child
def live_log_child(self): '''Start the logging child process if it died.''' if not (self.log_child and self.pid_is_alive(self.log_child)): self.start_log_child()
python
def live_log_child(self): '''Start the logging child process if it died.''' if not (self.log_child and self.pid_is_alive(self.log_child)): self.start_log_child()
[ "def", "live_log_child", "(", "self", ")", ":", "if", "not", "(", "self", ".", "log_child", "and", "self", ".", "pid_is_alive", "(", "self", ".", "log_child", ")", ")", ":", "self", ".", "start_log_child", "(", ")" ]
Start the logging child process if it died.
[ "Start", "the", "logging", "child", "process", "if", "it", "died", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L785-L788
240,240
diffeo/rejester
rejester/workers.py
ForkWorker.do_some_work
def do_some_work(self, can_start_more): '''Run one cycle of the main loop. If the log child has died, restart it. If any of the worker children have died, collect their status codes and remove them from the child set. If there is a worker slot available, start exactly one child. :param bool can_start_more: Allowed to start a child? :return: Time to wait before calling this function again ''' any_happy_children = False any_sad_children = False any_bored_children = False self.debug('loop', 'starting work loop, can_start_more={0!r}' .format(can_start_more)) # See if anyone has died while True: try: pid, status = os.waitpid(-1, os.WNOHANG) except OSError, e: if e.errno == errno.ECHILD: # No children at all pid = 0 else: raise if pid == 0: break elif pid == self.log_child: self.debug('children', 'log child with pid {0} exited'.format(pid)) self.start_log_child() elif pid in self.children: self.children.remove(pid) if os.WIFEXITED(status): code = os.WEXITSTATUS(status) self.debug('children', 'worker {0} exited with code {1}' .format(pid, code)) if code == SingleWorker.EXIT_SUCCESS: any_happy_children = True elif code == SingleWorker.EXIT_EXCEPTION: self.log(logging.WARNING, 'child {0} reported failure'.format(pid)) any_sad_children = True elif code == SingleWorker.EXIT_BORED: any_bored_children = True else: self.log(logging.WARNING, 'child {0} had odd exit code {1}' .format(pid, code)) elif os.WIFSIGNALED(status): self.log(logging.WARNING, 'child {0} exited with signal {1}' .format(pid, os.WTERMSIG(status))) any_sad_children = True else: self.log(logging.WARNING, 'child {0} went away with unknown status {1}' .format(pid, status)) any_sad_children = True else: self.log(logging.WARNING, 'child {0} exited, but we don\'t recognize it' .format(pid)) # ...what next? # (Don't log anything here; either we logged a WARNING message # above when things went badly, or we're in a very normal flow # and don't want to spam the log) if any_sad_children: self.debug('loop', 'exit work loop with sad child') return self.poll_interval if any_bored_children: self.debug('loop', 'exit work loop with no work') return self.poll_interval # This means we get to start a child, maybe. if can_start_more and len(self.children) < self.num_workers: pid = os.fork() if pid == 0: # We are the child self.clear_signal_handlers() if self.log_fd: os.close(self.log_fd) LoopWorker.as_child(yakonfig.get_global_config(), parent=self.worker_id) # This should never return, but just in case sys.exit(SingleWorker.EXIT_EXCEPTION) else: # We are the parent self.debug('children', 'new worker with pid {0}'.format(pid)) self.children.add(pid) self.debug('loop', 'exit work loop with a new worker') return self.spawn_interval # Absolutely nothing is happening; which means we have all # of our potential workers and they're doing work self.debug('loop', 'exit work loop with full system') return self.poll_interval
python
def do_some_work(self, can_start_more): '''Run one cycle of the main loop. If the log child has died, restart it. If any of the worker children have died, collect their status codes and remove them from the child set. If there is a worker slot available, start exactly one child. :param bool can_start_more: Allowed to start a child? :return: Time to wait before calling this function again ''' any_happy_children = False any_sad_children = False any_bored_children = False self.debug('loop', 'starting work loop, can_start_more={0!r}' .format(can_start_more)) # See if anyone has died while True: try: pid, status = os.waitpid(-1, os.WNOHANG) except OSError, e: if e.errno == errno.ECHILD: # No children at all pid = 0 else: raise if pid == 0: break elif pid == self.log_child: self.debug('children', 'log child with pid {0} exited'.format(pid)) self.start_log_child() elif pid in self.children: self.children.remove(pid) if os.WIFEXITED(status): code = os.WEXITSTATUS(status) self.debug('children', 'worker {0} exited with code {1}' .format(pid, code)) if code == SingleWorker.EXIT_SUCCESS: any_happy_children = True elif code == SingleWorker.EXIT_EXCEPTION: self.log(logging.WARNING, 'child {0} reported failure'.format(pid)) any_sad_children = True elif code == SingleWorker.EXIT_BORED: any_bored_children = True else: self.log(logging.WARNING, 'child {0} had odd exit code {1}' .format(pid, code)) elif os.WIFSIGNALED(status): self.log(logging.WARNING, 'child {0} exited with signal {1}' .format(pid, os.WTERMSIG(status))) any_sad_children = True else: self.log(logging.WARNING, 'child {0} went away with unknown status {1}' .format(pid, status)) any_sad_children = True else: self.log(logging.WARNING, 'child {0} exited, but we don\'t recognize it' .format(pid)) # ...what next? # (Don't log anything here; either we logged a WARNING message # above when things went badly, or we're in a very normal flow # and don't want to spam the log) if any_sad_children: self.debug('loop', 'exit work loop with sad child') return self.poll_interval if any_bored_children: self.debug('loop', 'exit work loop with no work') return self.poll_interval # This means we get to start a child, maybe. if can_start_more and len(self.children) < self.num_workers: pid = os.fork() if pid == 0: # We are the child self.clear_signal_handlers() if self.log_fd: os.close(self.log_fd) LoopWorker.as_child(yakonfig.get_global_config(), parent=self.worker_id) # This should never return, but just in case sys.exit(SingleWorker.EXIT_EXCEPTION) else: # We are the parent self.debug('children', 'new worker with pid {0}'.format(pid)) self.children.add(pid) self.debug('loop', 'exit work loop with a new worker') return self.spawn_interval # Absolutely nothing is happening; which means we have all # of our potential workers and they're doing work self.debug('loop', 'exit work loop with full system') return self.poll_interval
[ "def", "do_some_work", "(", "self", ",", "can_start_more", ")", ":", "any_happy_children", "=", "False", "any_sad_children", "=", "False", "any_bored_children", "=", "False", "self", ".", "debug", "(", "'loop'", ",", "'starting work loop, can_start_more={0!r}'", ".", "format", "(", "can_start_more", ")", ")", "# See if anyone has died", "while", "True", ":", "try", ":", "pid", ",", "status", "=", "os", ".", "waitpid", "(", "-", "1", ",", "os", ".", "WNOHANG", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ECHILD", ":", "# No children at all", "pid", "=", "0", "else", ":", "raise", "if", "pid", "==", "0", ":", "break", "elif", "pid", "==", "self", ".", "log_child", ":", "self", ".", "debug", "(", "'children'", ",", "'log child with pid {0} exited'", ".", "format", "(", "pid", ")", ")", "self", ".", "start_log_child", "(", ")", "elif", "pid", "in", "self", ".", "children", ":", "self", ".", "children", ".", "remove", "(", "pid", ")", "if", "os", ".", "WIFEXITED", "(", "status", ")", ":", "code", "=", "os", ".", "WEXITSTATUS", "(", "status", ")", "self", ".", "debug", "(", "'children'", ",", "'worker {0} exited with code {1}'", ".", "format", "(", "pid", ",", "code", ")", ")", "if", "code", "==", "SingleWorker", ".", "EXIT_SUCCESS", ":", "any_happy_children", "=", "True", "elif", "code", "==", "SingleWorker", ".", "EXIT_EXCEPTION", ":", "self", ".", "log", "(", "logging", ".", "WARNING", ",", "'child {0} reported failure'", ".", "format", "(", "pid", ")", ")", "any_sad_children", "=", "True", "elif", "code", "==", "SingleWorker", ".", "EXIT_BORED", ":", "any_bored_children", "=", "True", "else", ":", "self", ".", "log", "(", "logging", ".", "WARNING", ",", "'child {0} had odd exit code {1}'", ".", "format", "(", "pid", ",", "code", ")", ")", "elif", "os", ".", "WIFSIGNALED", "(", "status", ")", ":", "self", ".", "log", "(", "logging", ".", "WARNING", ",", "'child {0} exited with signal {1}'", ".", "format", "(", "pid", ",", "os", ".", "WTERMSIG", "(", "status", ")", ")", ")", "any_sad_children", "=", "True", "else", ":", "self", ".", "log", "(", "logging", ".", "WARNING", ",", "'child {0} went away with unknown status {1}'", ".", "format", "(", "pid", ",", "status", ")", ")", "any_sad_children", "=", "True", "else", ":", "self", ".", "log", "(", "logging", ".", "WARNING", ",", "'child {0} exited, but we don\\'t recognize it'", ".", "format", "(", "pid", ")", ")", "# ...what next?", "# (Don't log anything here; either we logged a WARNING message", "# above when things went badly, or we're in a very normal flow", "# and don't want to spam the log)", "if", "any_sad_children", ":", "self", ".", "debug", "(", "'loop'", ",", "'exit work loop with sad child'", ")", "return", "self", ".", "poll_interval", "if", "any_bored_children", ":", "self", ".", "debug", "(", "'loop'", ",", "'exit work loop with no work'", ")", "return", "self", ".", "poll_interval", "# This means we get to start a child, maybe.", "if", "can_start_more", "and", "len", "(", "self", ".", "children", ")", "<", "self", ".", "num_workers", ":", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", "==", "0", ":", "# We are the child", "self", ".", "clear_signal_handlers", "(", ")", "if", "self", ".", "log_fd", ":", "os", ".", "close", "(", "self", ".", "log_fd", ")", "LoopWorker", ".", "as_child", "(", "yakonfig", ".", "get_global_config", "(", ")", ",", "parent", "=", "self", ".", "worker_id", ")", "# This should never return, but just in case", "sys", ".", "exit", "(", "SingleWorker", ".", 
"EXIT_EXCEPTION", ")", "else", ":", "# We are the parent", "self", ".", "debug", "(", "'children'", ",", "'new worker with pid {0}'", ".", "format", "(", "pid", ")", ")", "self", ".", "children", ".", "add", "(", "pid", ")", "self", ".", "debug", "(", "'loop'", ",", "'exit work loop with a new worker'", ")", "return", "self", ".", "spawn_interval", "# Absolutely nothing is happening; which means we have all", "# of our potential workers and they're doing work", "self", ".", "debug", "(", "'loop'", ",", "'exit work loop with full system'", ")", "return", "self", ".", "poll_interval" ]
Run one cycle of the main loop. If the log child has died, restart it. If any of the worker children have died, collect their status codes and remove them from the child set. If there is a worker slot available, start exactly one child. :param bool can_start_more: Allowed to start a child? :return: Time to wait before calling this function again
[ "Run", "one", "cycle", "of", "the", "main", "loop", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L790-L893
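
The reaping loop at the top of `do_some_work` leans on three POSIX conventions: `os.waitpid(-1, os.WNOHANG)` returns `(0, 0)` while children exist but none have exited, raises `OSError` with `ECHILD` when there are no children at all, and packs the exit reason into a status word decoded with `WIFEXITED`/`WEXITSTATUS`/`WTERMSIG`. A minimal Python 3 sketch of the same shape (POSIX-only):

import errno
import os

pid = os.fork()
if pid == 0:
    os._exit(3)        # child: exit immediately with a known code

while True:
    try:
        child, status = os.waitpid(-1, os.WNOHANG)
    except OSError as e:
        if e.errno == errno.ECHILD:
            break      # no children remain at all
        raise
    if child == 0:
        continue       # children exist but none have exited yet
    if os.WIFEXITED(status):
        assert os.WEXITSTATUS(status) == 3
    elif os.WIFSIGNALED(status):
        print('killed by signal', os.WTERMSIG(status))
    break
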
240,241
diffeo/rejester
rejester/workers.py
ForkWorker.check_spinning_children
def check_spinning_children(self): '''Stop children that are working on overdue jobs.''' child_jobs = self.task_master.get_child_work_units(self.worker_id) # We will kill off any jobs that are due before "now". This # isn't really now now, but now plus a grace period to make # sure spinning jobs don't get retried. now = time.time() + self.stop_jobs_early for child, wul in child_jobs.iteritems(): if not isinstance(wul, (list, tuple)): # Support old style get_child_work_units which returns # single WorkUnit objects instead of list of them. wul = [wul] if not wul: # This worker is idle, but oddly, still present; it should # clean up after itself continue # filter on those actually assigned to the child worker wul = filter(lambda wu: wu.worker_id == child, wul) # check for any still active not-overdue job if any(filter(lambda wu: wu.expires > now, wul)): continue # So either someone else is doing its work or it's just overdue environment = self.task_master.get_heartbeat(child) if not environment: continue # derp if 'pid' not in environment: continue # derp if environment['pid'] not in self.children: continue # derp os.kill(environment['pid'], signal.SIGTERM) # This will cause the child to die, and do_some_work will # reap it; but we'd also like the job to fail if possible for wu in wul: if wu.data is None: logger.critical('how did wu.data become: %r' % wu.data) else: wu.data['traceback'] = 'job expired' wu.fail(exc=Exception('job expired'))
python
def check_spinning_children(self): '''Stop children that are working on overdue jobs.''' child_jobs = self.task_master.get_child_work_units(self.worker_id) # We will kill off any jobs that are due before "now". This # isn't really now now, but now plus a grace period to make # sure spinning jobs don't get retried. now = time.time() + self.stop_jobs_early for child, wul in child_jobs.iteritems(): if not isinstance(wul, (list, tuple)): # Support old style get_child_work_units which returns # single WorkUnit objects instead of list of them. wul = [wul] if not wul: # This worker is idle, but oddly, still present; it should # clean up after itself continue # filter on those actually assigned to the child worker wul = filter(lambda wu: wu.worker_id == child, wul) # check for any still active not-overdue job if any(filter(lambda wu: wu.expires > now, wul)): continue # So either someone else is doing its work or it's just overdue environment = self.task_master.get_heartbeat(child) if not environment: continue # derp if 'pid' not in environment: continue # derp if environment['pid'] not in self.children: continue # derp os.kill(environment['pid'], signal.SIGTERM) # This will cause the child to die, and do_some_work will # reap it; but we'd also like the job to fail if possible for wu in wul: if wu.data is None: logger.critical('how did wu.data become: %r' % wu.data) else: wu.data['traceback'] = 'job expired' wu.fail(exc=Exception('job expired'))
[ "def", "check_spinning_children", "(", "self", ")", ":", "child_jobs", "=", "self", ".", "task_master", ".", "get_child_work_units", "(", "self", ".", "worker_id", ")", "# We will kill off any jobs that are due before \"now\". This", "# isn't really now now, but now plus a grace period to make", "# sure spinning jobs don't get retried.", "now", "=", "time", ".", "time", "(", ")", "+", "self", ".", "stop_jobs_early", "for", "child", ",", "wul", "in", "child_jobs", ".", "iteritems", "(", ")", ":", "if", "not", "isinstance", "(", "wul", ",", "(", "list", ",", "tuple", ")", ")", ":", "# Support old style get_child_work_units which returns", "# single WorkUnit objects instead of list of them.", "wul", "=", "[", "wul", "]", "if", "not", "wul", ":", "# This worker is idle, but oddly, still present; it should", "# clean up after itself", "continue", "# filter on those actually assigned to the child worker", "wul", "=", "filter", "(", "lambda", "wu", ":", "wu", ".", "worker_id", "==", "child", ",", "wul", ")", "# check for any still active not-overdue job", "if", "any", "(", "filter", "(", "lambda", "wu", ":", "wu", ".", "expires", ">", "now", ",", "wul", ")", ")", ":", "continue", "# So either someone else is doing its work or it's just overdue", "environment", "=", "self", ".", "task_master", ".", "get_heartbeat", "(", "child", ")", "if", "not", "environment", ":", "continue", "# derp", "if", "'pid'", "not", "in", "environment", ":", "continue", "# derp", "if", "environment", "[", "'pid'", "]", "not", "in", "self", ".", "children", ":", "continue", "# derp", "os", ".", "kill", "(", "environment", "[", "'pid'", "]", ",", "signal", ".", "SIGTERM", ")", "# This will cause the child to die, and do_some_work will", "# reap it; but we'd also like the job to fail if possible", "for", "wu", "in", "wul", ":", "if", "wu", ".", "data", "is", "None", ":", "logger", ".", "critical", "(", "'how did wu.data become: %r'", "%", "wu", ".", "data", ")", "else", ":", "wu", ".", "data", "[", "'traceback'", "]", "=", "'job expired'", "wu", ".", "fail", "(", "exc", "=", "Exception", "(", "'job expired'", ")", ")" ]
Stop children that are working on overdue jobs.
[ "Stop", "children", "that", "are", "working", "on", "overdue", "jobs", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L895-L932
240,242
diffeo/rejester
rejester/workers.py
ForkWorker.stop_gracefully
def stop_gracefully(self): '''Refuse to start more processes. This runs in response to SIGINT or SIGTERM; if this isn't a background process, control-C and a normal ``kill`` command cause this. ''' if self.shutting_down: self.log(logging.INFO, 'second shutdown request, shutting down now') self.scram() else: self.log(logging.INFO, 'shutting down after current jobs finish') self.shutting_down = True
python
def stop_gracefully(self): '''Refuse to start more processes. This runs in response to SIGINT or SIGTERM; if this isn't a background process, control-C and a normal ``kill`` command cause this. ''' if self.shutting_down: self.log(logging.INFO, 'second shutdown request, shutting down now') self.scram() else: self.log(logging.INFO, 'shutting down after current jobs finish') self.shutting_down = True
[ "def", "stop_gracefully", "(", "self", ")", ":", "if", "self", ".", "shutting_down", ":", "self", ".", "log", "(", "logging", ".", "INFO", ",", "'second shutdown request, shutting down now'", ")", "self", ".", "scram", "(", ")", "else", ":", "self", ".", "log", "(", "logging", ".", "INFO", ",", "'shutting down after current jobs finish'", ")", "self", ".", "shutting_down", "=", "True" ]
Refuse to start more processes. This runs in response to SIGINT or SIGTERM; if this isn't a background process, control-C and a normal ``kill`` command cause this.
[ "Refuse", "to", "start", "more", "processes", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L934-L948
240,243
diffeo/rejester
rejester/workers.py
ForkWorker.stop_all_children
def stop_all_children(self): '''Kill all workers.''' # There's an unfortunate race condition if we try to log this # case: we can't depend on the logging child actually receiving # the log message before we kill it off. C'est la vie... self.stop_log_child() for pid in self.children: try: os.kill(pid, signal.SIGTERM) os.waitpid(pid, 0) except OSError, e: if e.errno == errno.ESRCH or e.errno == errno.ECHILD: # No such process pass else: raise
python
def stop_all_children(self): '''Kill all workers.''' # There's an unfortunate race condition if we try to log this # case: we can't depend on the logging child actually receiving # the log message before we kill it off. C'est la vie... self.stop_log_child() for pid in self.children: try: os.kill(pid, signal.SIGTERM) os.waitpid(pid, 0) except OSError, e: if e.errno == errno.ESRCH or e.errno == errno.ECHILD: # No such process pass else: raise
[ "def", "stop_all_children", "(", "self", ")", ":", "# There's an unfortunate race condition if we try to log this", "# case: we can't depend on the logging child actually receiving", "# the log message before we kill it off. C'est la vie...", "self", ".", "stop_log_child", "(", ")", "for", "pid", "in", "self", ".", "children", ":", "try", ":", "os", ".", "kill", "(", "pid", ",", "signal", ".", "SIGTERM", ")", "os", ".", "waitpid", "(", "pid", ",", "0", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ESRCH", "or", "e", ".", "errno", "==", "errno", ".", "ECHILD", ":", "# No such process", "pass", "else", ":", "raise" ]
Kill all workers.
[ "Kill", "all", "workers", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L950-L965
240,244
diffeo/rejester
rejester/workers.py
ForkWorker.scram
def scram(self): '''Kill all workers and die ourselves. This runs in response to SIGABRT, from a specific invocation of the ``kill`` command. It also runs if :meth:`stop_gracefully` is called more than once. ''' self.stop_all_children() signal.signal(signal.SIGTERM, signal.SIG_DFL) sys.exit(2)
python
def scram(self): '''Kill all workers and die ourselves. This runs in response to SIGABRT, from a specific invocation of the ``kill`` command. It also runs if :meth:`stop_gracefully` is called more than once. ''' self.stop_all_children() signal.signal(signal.SIGTERM, signal.SIG_DFL) sys.exit(2)
[ "def", "scram", "(", "self", ")", ":", "self", ".", "stop_all_children", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "signal", ".", "SIG_DFL", ")", "sys", ".", "exit", "(", "2", ")" ]
Kill all workers and die ourselves. This runs in response to SIGABRT, from a specific invocation of the ``kill`` command. It also runs if :meth:`stop_gracefully` is called more than once.
[ "Kill", "all", "workers", "and", "die", "ourselves", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L967-L977
240,245
diffeo/rejester
rejester/workers.py
ForkWorker.run
def run(self): '''Run the main loop. This is fairly invasive: it sets a bunch of signal handlers and spawns off a bunch of child processes. ''' setproctitle('rejester fork_worker for namespace {0}' .format(self.config.get('namespace', None))) self.set_signal_handlers() try: self.start_log_child() while True: can_start_more = not self.shutting_down if time.time() >= self.heartbeat_deadline: mode = self.heartbeat() if mode != self.last_mode: self.log(logging.INFO, 'rejester global mode is {0!r}'.format(mode)) self.last_mode = mode self.heartbeat_deadline = (time.time() + self.heartbeat_interval) self.check_spinning_children() else: mode = self.last_mode if mode != self.task_master.RUN: can_start_more = False interval = self.do_some_work(can_start_more) # Normal shutdown case if len(self.children) == 0: if mode == self.task_master.TERMINATE: self.log(logging.INFO, 'stopping for rejester global shutdown') break if self.shutting_down: self.log(logging.INFO, 'stopping in response to signal') break time.sleep(interval) except Exception: self.log(logging.CRITICAL, 'uncaught exception in worker: ' + traceback.format_exc()) finally: # See the note in run_workers() above. clear_signal_handlers() # calls signal.signal() which explicitly affects the current # process, parent or child. self.clear_signal_handlers()
python
def run(self): '''Run the main loop. This is fairly invasive: it sets a bunch of signal handlers and spawns off a bunch of child processes. ''' setproctitle('rejester fork_worker for namespace {0}' .format(self.config.get('namespace', None))) self.set_signal_handlers() try: self.start_log_child() while True: can_start_more = not self.shutting_down if time.time() >= self.heartbeat_deadline: mode = self.heartbeat() if mode != self.last_mode: self.log(logging.INFO, 'rejester global mode is {0!r}'.format(mode)) self.last_mode = mode self.heartbeat_deadline = (time.time() + self.heartbeat_interval) self.check_spinning_children() else: mode = self.last_mode if mode != self.task_master.RUN: can_start_more = False interval = self.do_some_work(can_start_more) # Normal shutdown case if len(self.children) == 0: if mode == self.task_master.TERMINATE: self.log(logging.INFO, 'stopping for rejester global shutdown') break if self.shutting_down: self.log(logging.INFO, 'stopping in response to signal') break time.sleep(interval) except Exception: self.log(logging.CRITICAL, 'uncaught exception in worker: ' + traceback.format_exc()) finally: # See the note in run_workers() above. clear_signal_handlers() # calls signal.signal() which explicitly affects the current # process, parent or child. self.clear_signal_handlers()
[ "def", "run", "(", "self", ")", ":", "setproctitle", "(", "'rejester fork_worker for namespace {0}'", ".", "format", "(", "self", ".", "config", ".", "get", "(", "'namespace'", ",", "None", ")", ")", ")", "self", ".", "set_signal_handlers", "(", ")", "try", ":", "self", ".", "start_log_child", "(", ")", "while", "True", ":", "can_start_more", "=", "not", "self", ".", "shutting_down", "if", "time", ".", "time", "(", ")", ">=", "self", ".", "heartbeat_deadline", ":", "mode", "=", "self", ".", "heartbeat", "(", ")", "if", "mode", "!=", "self", ".", "last_mode", ":", "self", ".", "log", "(", "logging", ".", "INFO", ",", "'rejester global mode is {0!r}'", ".", "format", "(", "mode", ")", ")", "self", ".", "last_mode", "=", "mode", "self", ".", "heartbeat_deadline", "=", "(", "time", ".", "time", "(", ")", "+", "self", ".", "heartbeat_interval", ")", "self", ".", "check_spinning_children", "(", ")", "else", ":", "mode", "=", "self", ".", "last_mode", "if", "mode", "!=", "self", ".", "task_master", ".", "RUN", ":", "can_start_more", "=", "False", "interval", "=", "self", ".", "do_some_work", "(", "can_start_more", ")", "# Normal shutdown case", "if", "len", "(", "self", ".", "children", ")", "==", "0", ":", "if", "mode", "==", "self", ".", "task_master", ".", "TERMINATE", ":", "self", ".", "log", "(", "logging", ".", "INFO", ",", "'stopping for rejester global shutdown'", ")", "break", "if", "self", ".", "shutting_down", ":", "self", ".", "log", "(", "logging", ".", "INFO", ",", "'stopping in response to signal'", ")", "break", "time", ".", "sleep", "(", "interval", ")", "except", "Exception", ":", "self", ".", "log", "(", "logging", ".", "CRITICAL", ",", "'uncaught exception in worker: '", "+", "traceback", ".", "format_exc", "(", ")", ")", "finally", ":", "# See the note in run_workers() above. clear_signal_handlers()", "# calls signal.signal() which explicitly affects the current", "# process, parent or child.", "self", ".", "clear_signal_handlers", "(", ")" ]
Run the main loop. This is fairly invasive: it sets a bunch of signal handlers and spawns off a bunch of child processes.
[ "Run", "the", "main", "loop", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/workers.py#L979-L1025
240,246
hobson/pug-dj
pug/dj/crawlnmine/fabfile/django_fabric_aws.py
instance
def instance(): """ Creates an EC2 instance from an Ubuntu AMI and configures it as a Django server with nginx + gunicorn """ # Record the starting time and print a starting message start_time = time.time() print(_green("Started...")) # Use boto to create an EC2 instance env.host_string = _create_ec2_instance() print(_green("Waiting 30 seconds for server to boot...")) time.sleep(30) # Configure the instance that was just created for item in tasks.configure_instance: try: print(_yellow(item['message'])) except KeyError: pass globals()["_" + item['action']](item['params']) # Print out the final runtime and the public dns of the new instance end_time = time.time() print(_green("Runtime: %f minutes" % ((end_time - start_time) / 60))) print(_green("\nPLEASE ADD ADDRESS THIS TO YOUR ")), print(_yellow("project_conf.py")), print(_green(" FILE UNDER ")), print(_yellow("fabconf['EC2_INSTANCES'] : ")), print(_green(env.host_string))
python
def instance(): """ Creates an EC2 instance from an Ubuntu AMI and configures it as a Django server with nginx + gunicorn """ # Record the starting time and print a starting message start_time = time.time() print(_green("Started...")) # Use boto to create an EC2 instance env.host_string = _create_ec2_instance() print(_green("Waiting 30 seconds for server to boot...")) time.sleep(30) # Configure the instance that was just created for item in tasks.configure_instance: try: print(_yellow(item['message'])) except KeyError: pass globals()["_" + item['action']](item['params']) # Print out the final runtime and the public dns of the new instance end_time = time.time() print(_green("Runtime: %f minutes" % ((end_time - start_time) / 60))) print(_green("\nPLEASE ADD ADDRESS THIS TO YOUR ")), print(_yellow("project_conf.py")), print(_green(" FILE UNDER ")), print(_yellow("fabconf['EC2_INSTANCES'] : ")), print(_green(env.host_string))
[ "def", "instance", "(", ")", ":", "# Record the starting time and print a starting message", "start_time", "=", "time", ".", "time", "(", ")", "print", "(", "_green", "(", "\"Started...\"", ")", ")", "# Use boto to create an EC2 instance", "env", ".", "host_string", "=", "_create_ec2_instance", "(", ")", "print", "(", "_green", "(", "\"Waiting 30 seconds for server to boot...\"", ")", ")", "time", ".", "sleep", "(", "30", ")", "# Configure the instance that was just created", "for", "item", "in", "tasks", ".", "configure_instance", ":", "try", ":", "print", "(", "_yellow", "(", "item", "[", "'message'", "]", ")", ")", "except", "KeyError", ":", "pass", "globals", "(", ")", "[", "\"_\"", "+", "item", "[", "'action'", "]", "]", "(", "item", "[", "'params'", "]", ")", "# Print out the final runtime and the public dns of the new instance", "end_time", "=", "time", ".", "time", "(", ")", "print", "(", "_green", "(", "\"Runtime: %f minutes\"", "%", "(", "(", "end_time", "-", "start_time", ")", "/", "60", ")", ")", ")", "print", "(", "_green", "(", "\"\\nPLEASE ADD ADDRESS THIS TO YOUR \"", ")", ")", ",", "print", "(", "_yellow", "(", "\"project_conf.py\"", ")", ")", ",", "print", "(", "_green", "(", "\" FILE UNDER \"", ")", ")", ",", "print", "(", "_yellow", "(", "\"fabconf['EC2_INSTANCES'] : \"", ")", ")", ",", "print", "(", "_green", "(", "env", ".", "host_string", ")", ")" ]
Creates an EC2 instance from an Ubuntu AMI and configures it as a Django server with nginx + gunicorn
[ "Creates", "an", "EC2", "instance", "from", "an", "Ubuntu", "AMI", "and", "configures", "it", "as", "a", "Django", "server", "with", "nginx", "+", "gunicorn" ]
55678b08755a55366ce18e7d3b8ea8fa4491ab04
https://github.com/hobson/pug-dj/blob/55678b08755a55366ce18e7d3b8ea8fa4491ab04/pug/dj/crawlnmine/fabfile/django_fabric_aws.py#L64-L93
240,247
hobson/pug-dj
pug/dj/crawlnmine/fabfile/django_fabric_aws.py
_run_task
def _run_task(task, start_message, finished_message): """ Takes a task from tasks.py and runs through the commands on the server """ # Get the hosts and record the start time env.hosts = fabconf['EC2_INSTANCES'] start = time.time() # Check if any hosts exist if env.hosts == []: print("There are no EC2 instances defined in project_conf.py, please add some instances and try again") print("or run 'fab spawn_instance' to create an instance") return # Print the starting message print(_yellow(start_message)) # Run the task items for item in task: try: print(_yellow(item['message'])) except KeyError: pass globals()["_" + item['action']](item['params']) # Print the final message and the elapsed time print(_yellow("%s in %.2fs" % (finished_message, time.time() - start)))
python
def _run_task(task, start_message, finished_message): """ Takes a task from tasks.py and runs through the commands on the server """ # Get the hosts and record the start time env.hosts = fabconf['EC2_INSTANCES'] start = time.time() # Check if any hosts exist if env.hosts == []: print("There are no EC2 instances defined in project_conf.py, please add some instances and try again") print("or run 'fab spawn_instance' to create an instance") return # Print the starting message print(_yellow(start_message)) # Run the task items for item in task: try: print(_yellow(item['message'])) except KeyError: pass globals()["_" + item['action']](item['params']) # Print the final message and the elapsed time print(_yellow("%s in %.2fs" % (finished_message, time.time() - start)))
[ "def", "_run_task", "(", "task", ",", "start_message", ",", "finished_message", ")", ":", "# Get the hosts and record the start time", "env", ".", "hosts", "=", "fabconf", "[", "'EC2_INSTANCES'", "]", "start", "=", "time", ".", "time", "(", ")", "# Check if any hosts exist", "if", "env", ".", "hosts", "==", "[", "]", ":", "print", "(", "\"There are EC2 instances defined in project_conf.py, please add some instances and try again\"", ")", "print", "(", "\"or run 'fab spawn_instance' to create an instance\"", ")", "return", "# Print the starting message", "print", "(", "_yellow", "(", "start_message", ")", ")", "# Run the task items", "for", "item", "in", "task", ":", "try", ":", "print", "(", "_yellow", "(", "item", "[", "'message'", "]", ")", ")", "except", "KeyError", ":", "pass", "globals", "(", ")", "[", "\"_\"", "+", "item", "[", "'action'", "]", "]", "(", "item", "[", "'params'", "]", ")", "# Print the final message and the elapsed time", "print", "(", "_yellow", "(", "\"%s in %.2fs\"", "%", "(", "finished_message", ",", "time", ".", "time", "(", ")", "-", "start", ")", ")", ")" ]
Takes a task from tasks.py and runs through the commands on the server
[ "Tasks", "a", "task", "from", "tasks", ".", "py", "and", "runs", "through", "the", "commands", "on", "the", "server" ]
55678b08755a55366ce18e7d3b8ea8fa4491ab04
https://github.com/hobson/pug-dj/blob/55678b08755a55366ce18e7d3b8ea8fa4491ab04/pug/dj/crawlnmine/fabfile/django_fabric_aws.py#L143-L170
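Both `instance` and `_run_task` above dispatch on task names with `globals()["_" + item['action']](item['params'])`: the action string is prefixed with an underscore and looked up among the module-level helpers. A minimal standalone sketch of that pattern; the helper functions and task items below are hypothetical stand-ins, not the real contents of tasks.py:

def _sudo(params):
    # Hypothetical stand-in for a fabric sudo() wrapper
    print("sudo: %s" % params)

def _put(params):
    # Hypothetical stand-in for a fabric put() wrapper
    print("put: %r" % (params,))

configure_instance = [
    {'action': 'sudo', 'params': 'apt-get update', 'message': 'Updating apt'},
    {'action': 'put', 'params': ('local.conf', '/etc/remote.conf')},
]

for item in configure_instance:
    if 'message' in item:  # the real code uses try/except KeyError for this
        print(item['message'])
    # "_" + action must name a callable at module scope
    globals()["_" + item['action']](item['params'])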
240,248
diffeo/yakonfig
yakonfig/factory.py
AutoFactory.create
def create(self, configurable, config=None, **kwargs): '''Create a sub-object of this factory. Instantiates the `configurable` object with the current saved :attr:`config`. This essentially translates to ``configurable(**config)``, except services defined in the parent and requested by `configurable` (by setting the ``services`` attribute) are injected. If a service is not defined on this factory object, then a :exc:`yakonfig.ProgrammerError` is raised. If `config` is provided, it is a local configuration for `configurable`, and it overrides the saved local configuration (if any). If not provided, then :attr:`config` must already be set, possibly by passing this object into the :mod:`yakonfig` top-level setup sequence. :param callable configurable: object to create :param dict config: local configuration for `configurable` :param kwargs: additional keyword parameters :return: ``configurable(**config)`` ''' # If we got passed a string, find the thing to make. if isinstance(configurable, string_types): candidates = [ac for ac in self.sub_modules if ac.config_name == configurable] if len(candidates) == 0: raise KeyError(configurable) configurable = candidates[0] # Regenerate the configuration if need be. if not isinstance(configurable, AutoConfigured): configurable = AutoConfigured.from_obj(configurable) if config is None: config = self.config.get(configurable.config_name, {}) # Iteratively build up the argument list. If you explicitly # called this function with a config dictionary with extra # parameters, those will be lost. params = {} for other, default in iteritems(configurable.default_config): params[other] = kwargs.get(other, config.get(other, default)) for other in getattr(configurable, 'services', []): # AutoConfigured.check_config() validates that this key # wasn't in the global config, so this must have come from # either our own config parameter, a keyword arg, or the # caller setting factory.config; trust those paths. if other == 'config': params[other] = dict(config, **kwargs) elif other in kwargs: params[other] = kwargs[other] elif other in config: params[other] = config[other] else: # We're not catching an `AttributeError` exception # here because it may cast a net too wide which makes # debugging underlying errors more difficult. params[other] = getattr(self, other) return configurable(**params)
python
def create(self, configurable, config=None, **kwargs): '''Create a sub-object of this factory. Instantiates the `configurable` object with the current saved :attr:`config`. This essentially translates to ``configurable(**config)``, except services defined in the parent and requested by `configurable` (by setting the ``services`` attribute) are injected. If a service is not defined on this factory object, then a :exc:`yakonfig.ProgrammerError` is raised. If `config` is provided, it is a local configuration for `configurable`, and it overrides the saved local configuration (if any). If not provided, then :attr:`config` must already be set, possibly by passing this object into the :mod:`yakonfig` top-level setup sequence. :param callable configurable: object to create :param dict config: local configuration for `configurable` :param kwargs: additional keyword parameters :return: ``configurable(**config)`` ''' # If we got passed a string, find the thing to make. if isinstance(configurable, string_types): candidates = [ac for ac in self.sub_modules if ac.config_name == configurable] if len(candidates) == 0: raise KeyError(configurable) configurable = candidates[0] # Regenerate the configuration if need be. if not isinstance(configurable, AutoConfigured): configurable = AutoConfigured.from_obj(configurable) if config is None: config = self.config.get(configurable.config_name, {}) # Iteratively build up the argument list. If you explicitly # called this function with a config dictionary with extra # parameters, those will be lost. params = {} for other, default in iteritems(configurable.default_config): params[other] = kwargs.get(other, config.get(other, default)) for other in getattr(configurable, 'services', []): # AutoConfigured.check_config() validates that this key # wasn't in the global config, so this must have come from # either our own config parameter, a keyword arg, or the # caller setting factory.config; trust those paths. if other == 'config': params[other] = dict(config, **kwargs) elif other in kwargs: params[other] = kwargs[other] elif other in config: params[other] = config[other] else: # We're not catching an `AttributeError` exception # here because it may cast a net too wide which makes # debugging underlying errors more difficult. params[other] = getattr(self, other) return configurable(**params)
[ "def", "create", "(", "self", ",", "configurable", ",", "config", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# If we got passed a string, find the thing to make.", "if", "isinstance", "(", "configurable", ",", "string_types", ")", ":", "candidates", "=", "[", "ac", "for", "ac", "in", "self", ".", "sub_modules", "if", "ac", ".", "config_name", "==", "configurable", "]", "if", "len", "(", "candidates", ")", "==", "0", ":", "raise", "KeyError", "(", "configurable", ")", "configurable", "=", "candidates", "[", "0", "]", "# Regenerate the configuration if need be.", "if", "not", "isinstance", "(", "configurable", ",", "AutoConfigured", ")", ":", "configurable", "=", "AutoConfigured", ".", "from_obj", "(", "configurable", ")", "if", "config", "is", "None", ":", "config", "=", "self", ".", "config", ".", "get", "(", "configurable", ".", "config_name", ",", "{", "}", ")", "# Iteratively build up the argument list. If you explicitly", "# called this function with a config dictionary with extra", "# parameters, those will be lost.", "params", "=", "{", "}", "for", "other", ",", "default", "in", "iteritems", "(", "configurable", ".", "default_config", ")", ":", "params", "[", "other", "]", "=", "kwargs", ".", "get", "(", "other", ",", "config", ".", "get", "(", "other", ",", "default", ")", ")", "for", "other", "in", "getattr", "(", "configurable", ",", "'services'", ",", "[", "]", ")", ":", "# AutoConfigured.check_config() validates that this key", "# wasn't in the global config, so this must have come from", "# either our own config parameter, a keyword arg, or the", "# caller setting factory.config; trust those paths.", "if", "other", "==", "'config'", ":", "params", "[", "other", "]", "=", "dict", "(", "config", ",", "*", "*", "kwargs", ")", "elif", "other", "in", "kwargs", ":", "params", "[", "other", "]", "=", "kwargs", "[", "other", "]", "elif", "other", "in", "config", ":", "params", "[", "other", "]", "=", "config", "[", "other", "]", "else", ":", "# We're not catching an `AttributeError` exception", "# here because it may case a net too wide which makes", "# debugging underlying errors more difficult.", "params", "[", "other", "]", "=", "getattr", "(", "self", ",", "other", ")", "return", "configurable", "(", "*", "*", "params", ")" ]
Create a sub-object of this factory. Instantiates the `configurable` object with the current saved :attr:`config`. This essentially translates to ``configurable(**config)``, except services defined in the parent and requested by `configurable` (by setting the ``services`` attribute) are injected. If a service is not defined on this factory object, then a :exc:`yakonfig.ProgrammerError` is raised. If `config` is provided, it is a local configuration for `configurable`, and it overrides the saved local configuration (if any). If not provided, then :attr:`config` must already be set, possibly by passing this object into the :mod:`yakonfig` top-level setup sequence. :param callable configurable: object to create :param dict config: local configuration for `configurable` :param kwargs: additional keyword parameters :return: ``configurable(**config)``
[ "Create", "a", "sub", "-", "object", "of", "this", "factory", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/factory.py#L130-L190
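The first loop in `create` resolves every declared parameter with the precedence kwargs > saved local config > declared default. A self-contained sketch of just that resolution step; the option names are illustrative, not yakonfig API:

default_config = {'host': 'localhost', 'port': 8080}  # declared defaults
config = {'port': 9090}                               # saved local config
kwargs = {'host': 'example.com'}                      # call-time overrides

params = {}
for name, default in default_config.items():
    # kwargs wins over config, which wins over the declared default
    params[name] = kwargs.get(name, config.get(name, default))

print(params)  # {'host': 'example.com', 'port': 9090}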
240,249
diffeo/yakonfig
yakonfig/factory.py
AutoConfigured.from_obj
def from_obj(cls, obj, any_configurable=False): '''Create a proxy object from a callable. If `any_configurable` is true, `obj` takes a parameter named ``config``, and `obj` smells like it implements :class:`yakonfig.Configurable` (it has a :attr:`~yakonfig.Configurable.config_name`), then return it directly. ''' discovered = cls.inspect_obj(obj) if ((any_configurable and 'config' in discovered['required'] and hasattr(obj, 'config_name'))): return obj return cls(obj, discovered['name'], discovered['required'], discovered['defaults'])
python
def from_obj(cls, obj, any_configurable=False): '''Create a proxy object from a callable. If `any_configurable` is true, `obj` takes a parameter named ``config``, and `obj` smells like it implements :class:`yakonfig.Configurable` (it has a :attr:`~yakonfig.Configurable.config_name`), then return it directly. ''' discovered = cls.inspect_obj(obj) if ((any_configurable and 'config' in discovered['required'] and hasattr(obj, 'config_name'))): return obj return cls(obj, discovered['name'], discovered['required'], discovered['defaults'])
[ "def", "from_obj", "(", "cls", ",", "obj", ",", "any_configurable", "=", "False", ")", ":", "discovered", "=", "cls", ".", "inspect_obj", "(", "obj", ")", "if", "(", "(", "any_configurable", "and", "'config'", "in", "discovered", "[", "'required'", "]", "and", "hasattr", "(", "obj", ",", "'config_name'", ")", ")", ")", ":", "return", "obj", "return", "cls", "(", "obj", ",", "discovered", "[", "'name'", "]", ",", "discovered", "[", "'required'", "]", ",", "discovered", "[", "'defaults'", "]", ")" ]
Create a proxy object from a callable. If `any_configurable` is true, `obj` takes a parameter named ``config``, and `obj` smells like it implements :class:`yakonfig.Configurable` (it has a :attr:`~yakonfig.Configurable.config_name`), then return it directly.
[ "Create", "a", "proxy", "object", "from", "a", "callable", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/factory.py#L212-L228
240,250
diffeo/yakonfig
yakonfig/factory.py
AutoConfigured.check_config
def check_config(self, config, name=''): '''Check that the configuration for this object is valid. This is a more restrictive check than for most :mod:`yakonfig` objects. It will raise :exc:`yakonfig.ConfigurationError` if `config` contains any keys that are not in the underlying callable's parameter list (that is, extra unused configuration options). This will also raise an exception if `config` contains keys that duplicate parameters that should be provided by the factory. .. note:: This last behavior is subject to change; future versions of the library may allow configuration to provide local configuration for a factory-provided object. :param dict config: the parent configuration dictionary, probably contains :attr:`config_name` as a key :param str name: qualified name of this object in the configuration :raise: :exc:`yakonfig.ConfigurationError` if excess parameters exist ''' # This is assuming that `config` is the config dictionary of # the *config parent*. That is, `config[self.config_name]` # exists. config = config.get(self.config_name, {}) # Complain about additional parameters, unless this is an # older object that's expecting a config dictionary. extras = set(config.keys()).difference(self.default_config) if 'config' not in self.services and extras: raise ConfigurationError( 'Unsupported config options for "%s": %s' % (self.config_name, ', '.join(extras))) # This only happens if you went out of your way to # circumvent the configuration and delete a parameter. missing = set(self.default_config).difference(config) if missing: raise ConfigurationError( 'Missing config options for "%s": %s' % (self.config_name, ', '.join(missing))) # Did caller try to provide parameter(s) that we also expect # the factory to provide? duplicates = set(config.keys()).intersection(set(self.services)) if duplicates: # N.B. I don't think the parameter can come from the # default config because Python will not let you have # `arg` and `arg=val` in the same parameter # list. (`discover_config`, below, guarantees that # positional and named parameters are disjoint.) raise ConfigurationError( 'Disallowed config options for "%s": %s' % (self.config_name, ', '.join(duplicates)))
python
def check_config(self, config, name=''): '''Check that the configuration for this object is valid. This is a more restrictive check than for most :mod:`yakonfig` objects. It will raise :exc:`yakonfig.ConfigurationError` if `config` contains any keys that are not in the underlying callable's parameter list (that is, extra unused configuration options). This will also raise an exception if `config` contains keys that duplicate parameters that should be provided by the factory. .. note:: This last behavior is subject to change; future versions of the library may allow configuration to provide local configuration for a factory-provided object. :param dict config: the parent configuration dictionary, probably contains :attr:`config_name` as a key :param str name: qualified name of this object in the configuration :raise: :exc:`yakonfig.ConfigurationError` if excess parameters exist ''' # This is assuming that `config` is the config dictionary of # the *config parent*. That is, `config[self.config_name]` # exists. config = config.get(self.config_name, {}) # Complain about additional parameters, unless this is an # older object that's expecting a config dictionary. extras = set(config.keys()).difference(self.default_config) if 'config' not in self.services and extras: raise ConfigurationError( 'Unsupported config options for "%s": %s' % (self.config_name, ', '.join(extras))) # This only happens if you went out of your way to # circumvent the configuration and delete a parameter. missing = set(self.default_config).difference(config) if missing: raise ConfigurationError( 'Missing config options for "%s": %s' % (self.config_name, ', '.join(missing))) # Did caller try to provide parameter(s) that we also expect # the factory to provide? duplicates = set(config.keys()).intersection(set(self.services)) if duplicates: # N.B. I don't think the parameter can come from the # default config because Python will not let you have # `arg` and `arg=val` in the same parameter # list. (`discover_config`, below, guarantees that # positional and named parameters are disjoint.) raise ConfigurationError( 'Disallowed config options for "%s": %s' % (self.config_name, ', '.join(duplicates)))
[ "def", "check_config", "(", "self", ",", "config", ",", "name", "=", "''", ")", ":", "# This is assuming that `config` is the config dictionary of", "# the *config parent*. That is, `config[self.config_name]`", "# exists.", "config", "=", "config", ".", "get", "(", "self", ".", "config_name", ",", "{", "}", ")", "# Complain about additional parameters, unless this is an", "# older object that's expecting a config dictionary.", "extras", "=", "set", "(", "config", ".", "keys", "(", ")", ")", ".", "difference", "(", "self", ".", "default_config", ")", "if", "'config'", "not", "in", "self", ".", "services", "and", "extras", ":", "raise", "ConfigurationError", "(", "'Unsupported config options for \"%s\": %s'", "%", "(", "self", ".", "config_name", ",", "', '", ".", "join", "(", "extras", ")", ")", ")", "# This only happens if you went out of your way to", "# circumvent the configuration and delete a parameter.", "missing", "=", "set", "(", "self", ".", "default_config", ")", ".", "difference", "(", "config", ")", "if", "missing", ":", "raise", "ConfigurationError", "(", "'Missing config options for \"%s\": %s'", "%", "(", "self", ".", "config_name", ",", "', '", ".", "join", "(", "missing", ")", ")", ")", "# Did caller try to provide parameter(s) that we also expect", "# the factory to provide?", "duplicates", "=", "set", "(", "config", ".", "keys", "(", ")", ")", ".", "intersection", "(", "set", "(", "self", ".", "services", ")", ")", "if", "duplicates", ":", "# N.B. I don't think the parameter can come from the", "# default config because Python will not let you have", "# `arg` and `arg=val` in the same parameter", "# list. (`discover_config`, below, guarantees that", "# positional and named parameters are disjoint.)", "raise", "ConfigurationError", "(", "'Disallowed config options for \"%s\": %s'", "%", "(", "self", ".", "config_name", ",", "', '", ".", "join", "(", "duplicates", ")", ")", ")" ]
Check that the configuration for this object is valid. This is a more restrictive check than for most :mod:`yakonfig` objects. It will raise :exc:`yakonfig.ConfigurationError` if `config` contains any keys that are not in the underlying callable's parameter list (that is, extra unused configuration options). This will also raise an exception if `config` contains keys that duplicate parameters that should be provided by the factory. .. note:: This last behavior is subject to change; future versions of the library may allow configuration to provide local configuration for a factory-provided object. :param dict config: the parent configuration dictionary, probably contains :attr:`config_name` as a key :param str name: qualified name of this object in the configuration :raise: :exc:`yakonfig.ConfigurationError` if excess parameters exist
[ "Check", "that", "the", "configuration", "for", "this", "object", "is", "valid", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/factory.py#L248-L302
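The three checks in `check_config` are plain set arithmetic over the config keys. A standalone sketch with hypothetical names of what each check catches (in normal use yakonfig fills in defaults before this runs, so `missing` only fires when a parameter was deliberately deleted):

default_config = {'host': 'localhost', 'port': 8080}   # callable's parameters
services = {'registry'}                                # factory-provided
config = {'host': 'db01', 'verbose': True, 'registry': object()}

extras = set(config).difference(default_config)        # unknown options
missing = set(default_config).difference(config)       # deleted parameters
duplicates = set(config).intersection(services)        # shadowed services
print(sorted(extras), sorted(missing), sorted(duplicates))
# ['registry', 'verbose'] ['port'] ['registry']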
240,251
diffeo/yakonfig
yakonfig/factory.py
AutoConfigured.inspect_obj
def inspect_obj(obj): '''Learn what there is to be learned from our target. Given an object at `obj`, which must be a function, method or class, return a configuration *discovered* from the name of the object and its parameter list. This function is responsible for doing runtime reflection and providing understandable failure modes. The return value is a dictionary with three keys: ``name``, ``required`` and ``defaults``. ``name`` is the name of the function/method/class. ``required`` is a list of parameters *without* default values. ``defaults`` is a dictionary mapping parameter names to default values. The sets of parameter names in ``required`` and ``defaults`` are disjoint. When given a class, the parameters are taken from its ``__init__`` method. Note that this function is purposefully conservative in the things that it will auto-configure. All of the following things will result in a :exc:`yakonfig.ProgrammerError` exception being raised: 1. A parameter list that contains tuple unpacking. (This is invalid syntax in Python 3.) 2. A parameter list that contains variable arguments (``*args``) or variable keyword arguments (``**kwargs``). This restriction forces an auto-configurable to explicitly state all configuration. Similarly, if given an object that isn't a function/method/class, a :exc:`yakonfig.ProgrammerError` will be raised. If reflection cannot be performed on ``obj``, then a ``TypeError`` is raised. ''' skip_params = 0 if inspect.isfunction(obj): name = obj.__name__ inspect_obj = obj skip_params = 0 elif inspect.ismethod(obj): name = obj.im_func.__name__ inspect_obj = obj skip_params = 1 # self elif inspect.isclass(obj): inspect_obj = None if hasattr(obj, '__dict__') and '__new__' in obj.__dict__: inspect_obj = obj.__new__ elif hasattr(obj, '__init__'): inspect_obj = obj.__init__ else: raise ProgrammerError( 'Class "%s" does not have a "__new__" or "__init__" ' 'method, so it cannot be auto configured.' % str(obj)) name = obj.__name__ if hasattr(obj, 'config_name'): name = obj.config_name if not inspect.ismethod(inspect_obj) \ and not inspect.isfunction(inspect_obj): raise ProgrammerError( '"%s.%s" is not a method/function (it is a "%s").' % (str(obj), inspect_obj.__name__, type(inspect_obj))) skip_params = 1 # self else: raise ProgrammerError( 'Expected a function, method or class to ' 'automatically configure, but got a "%s" ' '(type: "%s").' % (repr(obj), type(obj))) argspec = inspect.getargspec(inspect_obj) if argspec.varargs is not None or argspec.keywords is not None: raise ProgrammerError( 'The auto-configurable "%s" cannot contain ' '"*args" or "**kwargs" in its list of ' 'parameters.' % repr(obj)) if not all(isinstance(arg, string_types) for arg in argspec.args): raise ProgrammerError( 'Expected an auto-configurable with no nested ' 'parameters, but "%s" seems to contain some ' 'tuple unpacking: "%s"' % (repr(obj), argspec.args)) defaults = argspec.defaults or [] # The index into `argspec.args` at which keyword arguments with default # values starts. i_defaults = len(argspec.args) - len(defaults) return { 'name': name, 'required': argspec.args[skip_params:i_defaults], 'defaults': dict([(k, defaults[i]) for i, k in enumerate(argspec.args[i_defaults:])]), }
python
def inspect_obj(obj): '''Learn what there is to be learned from our target. Given an object at `obj`, which must be a function, method or class, return a configuration *discovered* from the name of the object and its parameter list. This function is responsible for doing runtime reflection and providing understandable failure modes. The return value is a dictionary with three keys: ``name``, ``required`` and ``defaults``. ``name`` is the name of the function/method/class. ``required`` is a list of parameters *without* default values. ``defaults`` is a dictionary mapping parameter names to default values. The sets of parameter names in ``required`` and ``defaults`` are disjoint. When given a class, the parameters are taken from its ``__init__`` method. Note that this function is purposefully conservative in the things that it will auto-configure. All of the following things will result in a :exc:`yakonfig.ProgrammerError` exception being raised: 1. A parameter list that contains tuple unpacking. (This is invalid syntax in Python 3.) 2. A parameter list that contains variable arguments (``*args``) or variable keyword arguments (``**kwargs``). This restriction forces an auto-configurable to explicitly state all configuration. Similarly, if given an object that isn't a function/method/class, a :exc:`yakonfig.ProgrammerError` will be raised. If reflection cannot be performed on ``obj``, then a ``TypeError`` is raised. ''' skip_params = 0 if inspect.isfunction(obj): name = obj.__name__ inspect_obj = obj skip_params = 0 elif inspect.ismethod(obj): name = obj.im_func.__name__ inspect_obj = obj skip_params = 1 # self elif inspect.isclass(obj): inspect_obj = None if hasattr(obj, '__dict__') and '__new__' in obj.__dict__: inspect_obj = obj.__new__ elif hasattr(obj, '__init__'): inspect_obj = obj.__init__ else: raise ProgrammerError( 'Class "%s" does not have a "__new__" or "__init__" ' 'method, so it cannot be auto configured.' % str(obj)) name = obj.__name__ if hasattr(obj, 'config_name'): name = obj.config_name if not inspect.ismethod(inspect_obj) \ and not inspect.isfunction(inspect_obj): raise ProgrammerError( '"%s.%s" is not a method/function (it is a "%s").' % (str(obj), inspect_obj.__name__, type(inspect_obj))) skip_params = 1 # self else: raise ProgrammerError( 'Expected a function, method or class to ' 'automatically configure, but got a "%s" ' '(type: "%s").' % (repr(obj), type(obj))) argspec = inspect.getargspec(inspect_obj) if argspec.varargs is not None or argspec.keywords is not None: raise ProgrammerError( 'The auto-configurable "%s" cannot contain ' '"*args" or "**kwargs" in its list of ' 'parameters.' % repr(obj)) if not all(isinstance(arg, string_types) for arg in argspec.args): raise ProgrammerError( 'Expected an auto-configurable with no nested ' 'parameters, but "%s" seems to contain some ' 'tuple unpacking: "%s"' % (repr(obj), argspec.args)) defaults = argspec.defaults or [] # The index into `argspec.args` at which keyword arguments with default # values starts. i_defaults = len(argspec.args) - len(defaults) return { 'name': name, 'required': argspec.args[skip_params:i_defaults], 'defaults': dict([(k, defaults[i]) for i, k in enumerate(argspec.args[i_defaults:])]), }
[ "def", "inspect_obj", "(", "obj", ")", ":", "skip_params", "=", "0", "if", "inspect", ".", "isfunction", "(", "obj", ")", ":", "name", "=", "obj", ".", "__name__", "inspect_obj", "=", "obj", "skip_params", "=", "0", "elif", "inspect", ".", "ismethod", "(", "obj", ")", ":", "name", "=", "obj", ".", "im_func", ".", "__name__", "inspect_obj", "=", "obj", "skip_params", "=", "1", "# self", "elif", "inspect", ".", "isclass", "(", "obj", ")", ":", "inspect_obj", "=", "None", "if", "hasattr", "(", "obj", ",", "'__dict__'", ")", "and", "'__new__'", "in", "obj", ".", "__dict__", ":", "inspect_obj", "=", "obj", ".", "__new__", "elif", "hasattr", "(", "obj", ",", "'__init__'", ")", ":", "inspect_obj", "=", "obj", ".", "__init__", "else", ":", "raise", "ProgrammerError", "(", "'Class \"%s\" does not have a \"__new__\" or \"__init__\" '", "'method, so it cannot be auto configured.'", "%", "str", "(", "obj", ")", ")", "name", "=", "obj", ".", "__name__", "if", "hasattr", "(", "obj", ",", "'config_name'", ")", ":", "name", "=", "obj", ".", "config_name", "if", "not", "inspect", ".", "ismethod", "(", "inspect_obj", ")", "and", "not", "inspect", ".", "isfunction", "(", "inspect_obj", ")", ":", "raise", "ProgrammerError", "(", "'\"%s.%s\" is not a method/function (it is a \"%s\").'", "%", "(", "str", "(", "obj", ")", ",", "inspect_obj", ".", "__name__", ",", "type", "(", "inspect_obj", ")", ")", ")", "skip_params", "=", "1", "# self", "else", ":", "raise", "ProgrammerError", "(", "'Expected a function, method or class to '", "'automatically configure, but got a \"%s\" '", "'(type: \"%s\").'", "%", "(", "repr", "(", "obj", ")", ",", "type", "(", "obj", ")", ")", ")", "argspec", "=", "inspect", ".", "getargspec", "(", "inspect_obj", ")", "if", "argspec", ".", "varargs", "is", "not", "None", "or", "argspec", ".", "keywords", "is", "not", "None", ":", "raise", "ProgrammerError", "(", "'The auto-configurable \"%s\" cannot contain '", "'\"*args\" or \"**kwargs\" in its list of '", "'parameters.'", "%", "repr", "(", "obj", ")", ")", "if", "not", "all", "(", "isinstance", "(", "arg", ",", "string_types", ")", "for", "arg", "in", "argspec", ".", "args", ")", ":", "raise", "ProgrammerError", "(", "'Expected an auto-configurable with no nested '", "'parameters, but \"%s\" seems to contain some '", "'tuple unpacking: \"%s\"'", "%", "(", "repr", "(", "obj", ")", ",", "argspec", ".", "args", ")", ")", "defaults", "=", "argspec", ".", "defaults", "or", "[", "]", "# The index into `argspec.args` at which keyword arguments with default", "# values starts.", "i_defaults", "=", "len", "(", "argspec", ".", "args", ")", "-", "len", "(", "defaults", ")", "return", "{", "'name'", ":", "name", ",", "'required'", ":", "argspec", ".", "args", "[", "skip_params", ":", "i_defaults", "]", ",", "'defaults'", ":", "dict", "(", "[", "(", "k", ",", "defaults", "[", "i", "]", ")", "for", "i", ",", "k", "in", "enumerate", "(", "argspec", ".", "args", "[", "i_defaults", ":", "]", ")", "]", ")", ",", "}" ]
Learn what there is to be learned from our target. Given an object at `obj`, which must be a function, method or class, return a configuration *discovered* from the name of the object and its parameter list. This function is responsible for doing runtime reflection and providing understandable failure modes. The return value is a dictionary with three keys: ``name``, ``required`` and ``defaults``. ``name`` is the name of the function/method/class. ``required`` is a list of parameters *without* default values. ``defaults`` is a dictionary mapping parameter names to default values. The sets of parameter names in ``required`` and ``defaults`` are disjoint. When given a class, the parameters are taken from its ``__init__`` method. Note that this function is purposefully conservative in the things that it will auto-configure. All of the following things will result in a :exc:`yakonfig.ProgrammerError` exception being raised: 1. A parameter list that contains tuple unpacking. (This is invalid syntax in Python 3.) 2. A parameter list that contains variable arguments (``*args``) or variable keyword arguments (``**kwargs``). This restriction forces an auto-configurable to explicitly state all configuration. Similarly, if given an object that isn't a function/method/class, a :exc:`yakonfig.ProgrammerError` will be raised. If reflection cannot be performed on ``obj``, then a ``TypeError`` is raised.
[ "Learn", "what", "there", "is", "to", "be", "learned", "from", "our", "target", "." ]
412e195da29b4f4fc7b72967c192714a6f5eaeb5
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/factory.py#L305-L397
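The required/defaults split at the bottom of `inspect_obj` relies on the argspec aligning `defaults` with the tail of `args`. A quick demonstration of that arithmetic using `inspect.getfullargspec`, the Python 3 successor of the deprecated `getargspec` called above:

import inspect

def sample(a, b, c=1, d=2):
    pass

spec = inspect.getfullargspec(sample)
defaults = spec.defaults or ()
i_defaults = len(spec.args) - len(defaults)         # defaults align with the tail
print(spec.args[:i_defaults])                       # ['a', 'b'] -> 'required'
print(dict(zip(spec.args[i_defaults:], defaults)))  # {'c': 1, 'd': 2} -> 'defaults'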
240,252
listen-lavender/webcrawl
webcrawl/aboutfile.py
modulepath
def modulepath(filename): """ Find the relative path to its module of a python file if existing. filename string, name of a python file """ filepath = os.path.abspath(filename) prepath = filepath[:filepath.rindex('/')] postpath = '/' if prepath.count('/') == 0 or not os.path.exists(prepath + '/__init__.py'): flag = False else: flag = True while True: if prepath.endswith('/lib') or prepath.endswith('/bin') or prepath.endswith('/site-packages'): break elif flag and (prepath.count('/') == 0 or not os.path.exists(prepath + '/__init__.py')): break else: for f in os.listdir(prepath): if '.py' in f: break else: break postpath = prepath[prepath.rindex('/'):].split('-')[0].split('_')[0] + postpath prepath = prepath[:prepath.rindex('/')] return postpath.lstrip('/') + filename.split('/')[-1].replace('.pyc', '').replace('.py', '') + '/'
python
def modulepath(filename): """ Find the relative path to its module of a python file if existing. filename string, name of a python file """ filepath = os.path.abspath(filename) prepath = filepath[:filepath.rindex('/')] postpath = '/' if prepath.count('/') == 0 or not os.path.exists(prepath + '/__init__.py'): flag = False else: flag = True while True: if prepath.endswith('/lib') or prepath.endswith('/bin') or prepath.endswith('/site-packages'): break elif flag and (prepath.count('/') == 0 or not os.path.exists(prepath + '/__init__.py')): break else: for f in os.listdir(prepath): if '.py' in f: break else: break postpath = prepath[prepath.rindex('/'):].split('-')[0].split('_')[0] + postpath prepath = prepath[:prepath.rindex('/')] return postpath.lstrip('/') + filename.split('/')[-1].replace('.pyc', '').replace('.py', '') + '/'
[ "def", "modulepath", "(", "filename", ")", ":", "filepath", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")", "prepath", "=", "filepath", "[", ":", "filepath", ".", "rindex", "(", "'/'", ")", "]", "postpath", "=", "'/'", "if", "prepath", ".", "count", "(", "'/'", ")", "==", "0", "or", "not", "os", ".", "path", ".", "exists", "(", "prepath", "+", "'/__init__.py'", ")", ":", "flag", "=", "False", "else", ":", "flag", "=", "True", "while", "True", ":", "if", "prepath", ".", "endswith", "(", "'/lib'", ")", "or", "prepath", ".", "endswith", "(", "'/bin'", ")", "or", "prepath", ".", "endswith", "(", "'/site-packages'", ")", ":", "break", "elif", "flag", "and", "(", "prepath", ".", "count", "(", "'/'", ")", "==", "0", "or", "not", "os", ".", "path", ".", "exists", "(", "prepath", "+", "'/__init__.py'", ")", ")", ":", "break", "else", ":", "for", "f", "in", "os", ".", "listdir", "(", "prepath", ")", ":", "if", "'.py'", "in", "f", ":", "break", "else", ":", "break", "postpath", "=", "prepath", "[", "prepath", ".", "rindex", "(", "'/'", ")", ":", "]", ".", "split", "(", "'-'", ")", "[", "0", "]", ".", "split", "(", "'_'", ")", "[", "0", "]", "+", "postpath", "prepath", "=", "prepath", "[", ":", "prepath", ".", "rindex", "(", "'/'", ")", "]", "return", "postpath", ".", "lstrip", "(", "'/'", ")", "+", "filename", ".", "split", "(", "'/'", ")", "[", "-", "1", "]", ".", "replace", "(", "'.pyc'", ",", "''", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "+", "'/'" ]
Find the relative module path of a Python file if it exists. filename string, name of a python file
[ "Find", "the", "relative", "path", "to", "its", "module", "of", "a", "python", "file", "if", "existing", "." ]
905dcfa6e6934aac764045660c0efcef28eae1e6
https://github.com/listen-lavender/webcrawl/blob/905dcfa6e6934aac764045660c0efcef28eae1e6/webcrawl/aboutfile.py#L10-L37
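The `while` loop in `modulepath` is essentially a walk up the directory tree that stops at the package boundary (no `__init__.py`) or at a lib/bin/site-packages directory. A simplified sketch of that core idea; this is an illustration only, not a drop-in replacement for the dash/underscore splitting above:

import os

def package_path(filename):
    """Walk up while __init__.py exists, collecting package components."""
    path = os.path.abspath(filename)
    directory = os.path.dirname(path)
    parts = [os.path.splitext(os.path.basename(path))[0]]
    while os.path.exists(os.path.join(directory, '__init__.py')):
        parts.append(os.path.basename(directory))
        directory = os.path.dirname(directory)
    # Mirror modulepath's trailing-slash output convention
    return '/'.join(reversed(parts)) + '/'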
240,253
the01/python-paps
paps/si/app/message.py
format_message_type
def format_message_type(message_type): """ Get printable version for message type :param message_type: Message type :type message_type: int :return: Printable version :rtype: str """ if message_type == MsgType.NOT_SET: return "NOT_SET" elif message_type == MsgType.ACK: return "ACK" elif message_type == MsgType.JOIN: return "JOIN" elif message_type == MsgType.UNJOIN: return "UNJOIN" elif message_type == MsgType.CONFIG: return "CONFIG" elif message_type == MsgType.UPDATE: return "UPDATE" elif message_type == MsgType.DATA: return "DATA" else: return u"{}".format(message_type)
python
def format_message_type(message_type): """ Get printable version for message type :param message_type: Message type :type message_type: int :return: Printable version :rtype: str """ if message_type == MsgType.NOT_SET: return "NOT_SET" elif message_type == MsgType.ACK: return "ACK" elif message_type == MsgType.JOIN: return "JOIN" elif message_type == MsgType.UNJOIN: return "UNJOIN" elif message_type == MsgType.CONFIG: return "CONFIG" elif message_type == MsgType.UPDATE: return "UPDATE" elif message_type == MsgType.DATA: return "DATA" else: return u"{}".format(message_type)
[ "def", "format_message_type", "(", "message_type", ")", ":", "if", "message_type", "==", "MsgType", ".", "NOT_SET", ":", "return", "\"NOT_SET\"", "elif", "message_type", "==", "MsgType", ".", "ACK", ":", "return", "\"ACK\"", "elif", "message_type", "==", "MsgType", ".", "JOIN", ":", "return", "\"JOIN\"", "elif", "message_type", "==", "MsgType", ".", "UNJOIN", ":", "return", "\"UNJOIN\"", "elif", "message_type", "==", "MsgType", ".", "CONFIG", ":", "return", "\"CONFIG\"", "elif", "message_type", "==", "MsgType", ".", "UPDATE", ":", "return", "\"UPDATE\"", "elif", "message_type", "==", "MsgType", ".", "DATA", ":", "return", "\"DATA\"", "else", ":", "return", "u\"{}\"", ".", "format", "(", "message_type", ")" ]
Get printable version for message type :param message_type: Message type :type message_type: int :return: Printable version :rtype: str
[ "Get", "printable", "version", "for", "message", "type" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L80-L104
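The if/elif ladder in `format_message_type` is behavior-equivalent to a lookup table with a formatted fallback. A sketch of that alternative, assuming the same `MsgType` constants (hashable ints) imported from this module:

# Behavior-equivalent lookup-table version (assumes MsgType values are ints)
_TYPE_NAMES = {
    MsgType.NOT_SET: "NOT_SET",
    MsgType.ACK: "ACK",
    MsgType.JOIN: "JOIN",
    MsgType.UNJOIN: "UNJOIN",
    MsgType.CONFIG: "CONFIG",
    MsgType.UPDATE: "UPDATE",
    MsgType.DATA: "DATA",
}

def format_message_type(message_type):
    return _TYPE_NAMES.get(message_type, u"{}".format(message_type))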
240,254
the01/python-paps
paps/si/app/message.py
format_data
def format_data(data): """ Format bytes for printing :param data: Bytes :type data: None | bytearray | str :return: Printable version :rtype: unicode """ if data is None: return None return u":".join([u"{:02x}".format(ord(c)) for c in data])
python
def format_data(data): """ Format bytes for printing :param data: Bytes :type data: None | bytearray | str :return: Printable version :rtype: unicode """ if data is None: return None return u":".join([u"{:02x}".format(ord(c)) for c in data])
[ "def", "format_data", "(", "data", ")", ":", "if", "data", "is", "None", ":", "return", "None", "return", "u\":\"", ".", "join", "(", "[", "u\"{:02x}\"", ".", "format", "(", "ord", "(", "c", ")", ")", "for", "c", "in", "data", "]", ")" ]
Format bytes for printing :param data: Bytes :type data: None | bytearray | str :return: Printable version :rtype: unicode
[ "Format", "bytes", "for", "printing" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L107-L118
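A quick usage sketch for `format_data`, showing the colon-separated two-digit hex output (the bytearray case relies on the int-safe branch noted above):

print(format_data("AB"))                 # 41:42
print(format_data(bytearray([0, 255])))  # 00:ff
print(format_data(None))                 # None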
240,255
the01/python-paps
paps/si/app/message.py
guess_message_type
def guess_message_type(message): """ Guess the message type based on the class of message :param message: Message to guess the type for :type message: APPMessage :return: The corresponding message type (MsgType) or None if not found :rtype: None | int """ if isinstance(message, APPConfigMessage): return MsgType.CONFIG elif isinstance(message, APPJoinMessage): return MsgType.JOIN elif isinstance(message, APPDataMessage): # All inheriting from this first !! return MsgType.DATA elif isinstance(message, APPUpdateMessage): return MsgType.UPDATE elif isinstance(message, APPUnjoinMessage): return MsgType.UNJOIN # APPMessage -> ACK? return None
python
def guess_message_type(message): """ Guess the message type based on the class of message :param message: Message to guess the type for :type message: APPMessage :return: The corresponding message type (MsgType) or None if not found :rtype: None | int """ if isinstance(message, APPConfigMessage): return MsgType.CONFIG elif isinstance(message, APPJoinMessage): return MsgType.JOIN elif isinstance(message, APPDataMessage): # All inheriting from this first !! return MsgType.DATA elif isinstance(message, APPUpdateMessage): return MsgType.UPDATE elif isinstance(message, APPUnjoinMessage): return MsgType.UNJOIN # APPMessage -> ACK? return None
[ "def", "guess_message_type", "(", "message", ")", ":", "if", "isinstance", "(", "message", ",", "APPConfigMessage", ")", ":", "return", "MsgType", ".", "CONFIG", "elif", "isinstance", "(", "message", ",", "APPJoinMessage", ")", ":", "return", "MsgType", ".", "JOIN", "elif", "isinstance", "(", "message", ",", "APPDataMessage", ")", ":", "# All inheriting from this first !!", "return", "MsgType", ".", "DATA", "elif", "isinstance", "(", "message", ",", "APPUpdateMessage", ")", ":", "return", "MsgType", ".", "UPDATE", "elif", "isinstance", "(", "message", ",", "APPUnjoinMessage", ")", ":", "return", "MsgType", ".", "UNJOIN", "# APPMessage -> ACK?", "return", "None" ]
Guess the message type based on the class of message :param message: Message to guess the type for :type message: APPMessage :return: The corresponding message type (MsgType) or None if not found :rtype: None | int
[ "Guess", "the", "message", "type", "based", "on", "the", "class", "of", "message" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L145-L166
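The "All inheriting from this first !!" comment is about ordering: in an isinstance ladder a subclass instance also matches its base class, so subclass checks must precede base-class checks. A minimal illustration:

class Base(object):
    pass

class Child(Base):
    pass

def classify(obj):
    if isinstance(obj, Child):   # subclass check first...
        return "child"
    elif isinstance(obj, Base):  # ...or this broader check would shadow it
        return "base"
    return None

print(classify(Child()))  # child
print(classify(Base()))   # base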
240,256
the01/python-paps
paps/si/app/message.py
APPHeader.timestamp_localize
def timestamp_localize(value): """ Save timestamp as utc :param value: Timestamp (in UTC or with tz_info) :type value: float | datetime.datetime :return: Localized timestamp :rtype: float """ if isinstance(value, datetime.datetime): if not value.tzinfo: value = pytz.UTC.localize(value) else: value = value.astimezone(pytz.UTC) # Assumes utc (and add the microsecond part) value = calendar.timegm(value.timetuple()) + \ value.microsecond / 1e6 return value
python
def timestamp_localize(value): """ Save timestamp as utc :param value: Timestamp (in UTC or with tz_info) :type value: float | datetime.datetime :return: Localized timestamp :rtype: float """ if isinstance(value, datetime.datetime): if not value.tzinfo: value = pytz.UTC.localize(value) else: value = value.astimezone(pytz.UTC) # Assumes utc (and add the microsecond part) value = calendar.timegm(value.timetuple()) + \ value.microsecond / 1e6 return value
[ "def", "timestamp_localize", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "datetime", ".", "datetime", ")", ":", "if", "not", "value", ".", "tzinfo", ":", "value", "=", "pytz", ".", "UTC", ".", "localize", "(", "value", ")", "else", ":", "value", "=", "value", ".", "astimezone", "(", "pytz", ".", "UTC", ")", "# Assumes utc (and add the microsecond part)", "value", "=", "calendar", ".", "timegm", "(", "value", ".", "timetuple", "(", ")", ")", "+", "value", ".", "microsecond", "/", "1e6", "return", "value" ]
Normalize a timestamp to UTC epoch seconds :param value: Timestamp (in UTC or with tz_info) :type value: float | datetime.datetime :return: Seconds since the epoch, UTC :rtype: float
[ "Save", "timestamp", "as", "utc" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L257-L274
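`calendar.timegm` interprets a struct_time as UTC, so normalizing to UTC first makes naive and aware datetimes agree on the same instant. A short demonstration of the conversion above (requires pytz; the chosen timezone is arbitrary):

import calendar
import datetime

import pytz

stockholm = pytz.timezone('Europe/Stockholm')
naive = datetime.datetime(2020, 1, 1, 0, 0, 0, 500000)   # treated as UTC
aware = stockholm.localize(naive)                        # UTC+1 in January

for value in (naive, aware):
    if not value.tzinfo:
        value = pytz.UTC.localize(value)
    else:
        value = value.astimezone(pytz.UTC)
    print(calendar.timegm(value.timetuple()) + value.microsecond / 1e6)
# 1577836800.5 (naive, read as UTC)
# 1577833200.5 (aware, one hour earlier in UTC)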
240,257
the01/python-paps
paps/si/app/message.py
APPHeader.set_timestamp_to_current
def set_timestamp_to_current(self): """ Set timestamp to current time utc :rtype: None """ # Good form to add tzinfo self.timestamp = pytz.UTC.localize(datetime.datetime.utcnow())
python
def set_timestamp_to_current(self): """ Set timestamp to current time utc :rtype: None """ # Good form to add tzinfo self.timestamp = pytz.UTC.localize(datetime.datetime.utcnow())
[ "def", "set_timestamp_to_current", "(", "self", ")", ":", "# Good form to add tzinfo", "self", ".", "timestamp", "=", "pytz", ".", "UTC", ".", "localize", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ")" ]
Set timestamp to current time utc :rtype: None
[ "Set", "timestamp", "to", "current", "time", "utc" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L313-L320
240,258
the01/python-paps
paps/si/app/message.py
APPMessage.update
def update(self, obj): """ Set this instance up based on another instance :param obj: Instance to copy from :type obj: APPMessage :rtype: None """ if isinstance(obj, APPMessage): self._header = obj._header self._payload = obj._payload
python
def update(self, obj): """ Set this instance up based on another instance :param obj: Instance to copy from :type obj: APPMessage :rtype: None """ if isinstance(obj, APPMessage): self._header = obj._header self._payload = obj._payload
[ "def", "update", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "APPMessage", ")", ":", "self", ".", "_header", "=", "obj", ".", "_header", "self", ".", "_payload", "=", "obj", ".", "_payload" ]
Set this instance up based on another instance :param obj: Instance to copy from :type obj: APPMessage :rtype: None
[ "Set", "this", "instance", "up", "based", "on", "another", "instance" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L477-L487
240,259
the01/python-paps
paps/si/app/message.py
APPUpdateMessage._pack_people
def _pack_people(people): """ Pack people into a network transmittable format :param people: People to pack :type people: list[paps.people.People] :return: The packed people :rtype: str """ res = bytearray() bits = bytearray([1]) for person in people: bits.extend(person.to_bits()) aByte = 0 for i, bit in enumerate(bits[::-1]): mod = i % 8 aByte |= bit << mod if mod == 7 or i == len(bits) - 1: res.append(aByte) aByte = 0 return struct.pack(APPUpdateMessage.fmt.format(len(res)), *res[::-1])
python
def _pack_people(people): """ Pack people into a network transmittable format :param people: People to pack :type people: list[paps.people.People] :return: The packed people :rtype: str """ res = bytearray() bits = bytearray([1]) for person in people: bits.extend(person.to_bits()) aByte = 0 for i, bit in enumerate(bits[::-1]): mod = i % 8 aByte |= bit << mod if mod == 7 or i == len(bits) - 1: res.append(aByte) aByte = 0 return struct.pack(APPUpdateMessage.fmt.format(len(res)), *res[::-1])
[ "def", "_pack_people", "(", "people", ")", ":", "res", "=", "bytearray", "(", ")", "bits", "=", "bytearray", "(", "[", "1", "]", ")", "for", "person", "in", "people", ":", "bits", ".", "extend", "(", "person", ".", "to_bits", "(", ")", ")", "aByte", "=", "0", "for", "i", ",", "bit", "in", "enumerate", "(", "bits", "[", ":", ":", "-", "1", "]", ")", ":", "mod", "=", "i", "%", "8", "aByte", "|=", "bit", "<<", "mod", "if", "mod", "==", "7", "or", "i", "==", "len", "(", "bits", ")", "-", "1", ":", "res", ".", "append", "(", "aByte", ")", "aByte", "=", "0", "return", "struct", ".", "pack", "(", "APPUpdateMessage", ".", "fmt", ".", "format", "(", "len", "(", "res", ")", ")", ",", "*", "res", "[", ":", ":", "-", "1", "]", ")" ]
Pack people into a network transmittable format :param people: People to pack :type people: list[paps.people.People] :return: The packed people :rtype: str
[ "Pack", "people", "into", "a", "network", "transmittable", "format" ]
2dde5a71913e4c7b22901cf05c6ecedd890919c4
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/message.py#L723-L745
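The leading `bytearray([1])` in `_pack_people` is a sentinel bit: it marks where the per-person bits begin, so leading zero bits survive the right-alignment into bytes. A standalone sketch of the same packing for a plain list of bits; the "!{}B" format string is an assumption about what `APPUpdateMessage.fmt` holds:

import struct

def pack_bits(bits):
    """Pack 0/1 ints, preceded by a sentinel 1 bit, LSB-first per byte."""
    bits = bytearray([1]) + bytearray(bits)  # sentinel guards leading zeros
    res = bytearray()
    byte = 0
    for i, bit in enumerate(bits[::-1]):
        mod = i % 8
        byte |= bit << mod
        if mod == 7 or i == len(bits) - 1:
            res.append(byte)
            byte = 0
    return struct.pack("!{}B".format(len(res)), *res[::-1])

print(repr(pack_bits([0, 1, 1, 0])))  # 0b10110 -> b'\x16' on Python 3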
240,260
se-esss-litterbox/Pynac
Pynac/Elements.py
Quad.from_dynacRepr
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Quad`` instance from the Pynac lattice element """ L = float(pynacRepr[1][0][0]) B = float(pynacRepr[1][0][1]) aperRadius = float(pynacRepr[1][0][2]) return cls(L, B, aperRadius)
python
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Quad`` instance from the Pynac lattice element """ L = float(pynacRepr[1][0][0]) B = float(pynacRepr[1][0][1]) aperRadius = float(pynacRepr[1][0][2]) return cls(L, B, aperRadius)
[ "def", "from_dynacRepr", "(", "cls", ",", "pynacRepr", ")", ":", "L", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "0", "]", ")", "B", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "1", "]", ")", "aperRadius", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "2", "]", ")", "return", "cls", "(", "L", ",", "B", ",", "aperRadius", ")" ]
Construct a ``Quad`` instance from the Pynac lattice element
[ "Construct", "a", "Quad", "instance", "from", "the", "Pynac", "lattice", "element" ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L46-L53
240,261
se-esss-litterbox/Pynac
Pynac/Elements.py
Quad.dynacRepresentation
def dynacRepresentation(self): """ Return the Pynac representation of this quadrupole instance. """ return ['QUADRUPO', [[self.L.val, self.B.val, self.aperRadius.val]]]
python
def dynacRepresentation(self): """ Return the Pynac representation of this quadrupole instance. """ return ['QUADRUPO', [[self.L.val, self.B.val, self.aperRadius.val]]]
[ "def", "dynacRepresentation", "(", "self", ")", ":", "return", "[", "'QUADRUPO'", ",", "[", "[", "self", ".", "L", ".", "val", ",", "self", ".", "B", ".", "val", ",", "self", ".", "aperRadius", ".", "val", "]", "]", "]" ]
Return the Dynac representation of this quadrupole instance.
[ "Return", "the", "Pynac", "representation", "of", "this", "quadrupole", "instance", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L71-L75
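`from_dynacRepr` and `dynacRepresentation` are intended as inverses over the nested-list form, which a round trip can verify. A usage sketch, assuming the positional `Quad(L, B, aperRadius)` constructor that the classmethod above calls:

quad = Quad(0.5, 1.2, 2.0)        # assumed positional constructor
rep = quad.dynacRepresentation()  # ['QUADRUPO', [[0.5, 1.2, 2.0]]]
assert Quad.from_dynacRepr(rep).dynacRepresentation() == rep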
240,262
se-esss-litterbox/Pynac
Pynac/Elements.py
CavityAnalytic.from_dynacRepr
def from_dynacRepr(cls, pynacRepr): """ Construct a ``CavityAnalytic`` instance from the Pynac lattice element """ cavID = int(pynacRepr[1][0][0]) xesln = float(pynacRepr[1][1][0]) phase = float(pynacRepr[1][1][1]) fieldReduction = float(pynacRepr[1][1][2]) isec = int(pynacRepr[1][1][3]) return cls(phase, fieldReduction, cavID, xesln, isec)
python
def from_dynacRepr(cls, pynacRepr): """ Construct a ``CavityAnalytic`` instance from the Pynac lattice element """ cavID = int(pynacRepr[1][0][0]) xesln = float(pynacRepr[1][1][0]) phase = float(pynacRepr[1][1][1]) fieldReduction = float(pynacRepr[1][1][2]) isec = int(pynacRepr[1][1][3]) return cls(phase, fieldReduction, cavID, xesln, isec)
[ "def", "from_dynacRepr", "(", "cls", ",", "pynacRepr", ")", ":", "cavID", "=", "int", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "0", "]", ")", "xesln", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "1", "]", "[", "0", "]", ")", "phase", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "1", "]", "[", "1", "]", ")", "fieldReduction", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "1", "]", "[", "2", "]", ")", "isec", "=", "int", "(", "pynacRepr", "[", "1", "]", "[", "1", "]", "[", "3", "]", ")", "return", "cls", "(", "phase", ",", "fieldReduction", ",", "cavID", ",", "xesln", ",", "isec", ")" ]
Construct a ``CavityAnalytic`` instance from the Pynac lattice element
[ "Construct", "a", "CavityAnalytic", "instance", "from", "the", "Pynac", "lattice", "element" ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L102-L111
240,263
se-esss-litterbox/Pynac
Pynac/Elements.py
CavityAnalytic.adjustPhase
def adjustPhase(self, adjustment): """ Adjust the accelerating phase of the cavity by the value of ``adjustment``. The adjustment is additive, so a value of ``adjustment = 0.0`` will result in no change of the phase. """ self.phase = self.phase._replace(val = self.phase.val + adjustment)
python
def adjustPhase(self, adjustment): """ Adjust the accelerating phase of the cavity by the value of ``adjustment``. The adjustment is additive, so a value of ``adjustment = 0.0`` will result in no change of the phase. """ self.phase = self.phase._replace(val = self.phase.val + adjustment)
[ "def", "adjustPhase", "(", "self", ",", "adjustment", ")", ":", "self", ".", "phase", "=", "self", ".", "phase", ".", "_replace", "(", "val", "=", "self", ".", "phase", ".", "val", "+", "adjustment", ")" ]
Adjust the accelerating phase of the cavity by the value of ``adjustment``. The adjustment is additive, so a value of ``adjustment = 0.0`` will result in no change of the phase.
[ "Adjust", "the", "accelerating", "phase", "of", "the", "cavity", "by", "the", "value", "of", "adjustment", ".", "The", "adjustment", "is", "additive", "so", "a", "value", "of", "scalingFactor", "=", "0", ".", "0", "will", "result", "in", "no", "change", "of", "the", "phase", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L113-L119
240,264
se-esss-litterbox/Pynac
Pynac/Elements.py
CavityAnalytic.scaleField
def scaleField(self, scalingFactor): """ Adjust the accelerating field of the cavity by the value of ``scalingFactor``. The adjustment is multiplicative, so a value of ``scalingFactor = 1.0`` will result in no change of the field. """ oldField = self.fieldReduction.val newField = 100.0 * (scalingFactor * (1.0 + oldField/100.0) - 1.0) self.fieldReduction = self.fieldReduction._replace(val = newField)
python
def scaleField(self, scalingFactor): """ Adjust the accelerating field of the cavity by the value of ``scalingFactor``. The adjustment is multiplicative, so a value of ``scalingFactor = 1.0`` will result in no change of the field. """ oldField = self.fieldReduction.val newField = 100.0 * (scalingFactor * (1.0 + oldField/100.0) - 1.0) self.fieldReduction = self.fieldReduction._replace(val = newField)
[ "def", "scaleField", "(", "self", ",", "scalingFactor", ")", ":", "oldField", "=", "self", ".", "fieldReduction", ".", "val", "newField", "=", "100.0", "*", "(", "scalingFactor", "*", "(", "1.0", "+", "oldField", "/", "100.0", ")", "-", "1.0", ")", "self", ".", "fieldReduction", "=", "self", ".", "fieldReduction", ".", "_replace", "(", "val", "=", "newField", ")" ]
Adjust the accelerating field of the cavity by the value of ``scalingFactor``. The adjustment is multiplicative, so a value of ``scalingFactor = 1.0`` will result in no change of the field.
[ "Adjust", "the", "accelerating", "field", "of", "the", "cavity", "by", "the", "value", "of", "scalingFactor", ".", "The", "adjustment", "is", "multiplicative", "so", "a", "value", "of", "scalingFactor", "=", "1", ".", "0", "will", "result", "in", "no", "change", "of", "the", "field", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L121-L129
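`fieldReduction` is a percentage offset from the nominal field, so `scaleField` converts it to a multiplier, scales, and converts back. A worked example of that arithmetic:

# fieldReduction = -10.0 means the cavity runs at 90% of nominal field
old_field = -10.0
scaling_factor = 1.1  # request 10% more field
new_field = 100.0 * (scaling_factor * (1.0 + old_field / 100.0) - 1.0)
print(new_field)      # -1.0, i.e. 0.9 * 1.1 = 99% of nominal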
240,265
se-esss-litterbox/Pynac
Pynac/Elements.py
CavityAnalytic.dynacRepresentation
def dynacRepresentation(self): """ Return the Dynac representation of this cavity instance. """ return ['CAVMC', [ [self.cavID.val], [self.xesln.val, self.phase.val, self.fieldReduction.val, self.isec.val, 1], ]]
python
def dynacRepresentation(self): """ Return the Dynac representation of this cavity instance. """ return ['CAVMC', [ [self.cavID.val], [self.xesln.val, self.phase.val, self.fieldReduction.val, self.isec.val, 1], ]]
[ "def", "dynacRepresentation", "(", "self", ")", ":", "return", "[", "'CAVMC'", ",", "[", "[", "self", ".", "cavID", ".", "val", "]", ",", "[", "self", ".", "xesln", ".", "val", ",", "self", ".", "phase", ".", "val", ",", "self", ".", "fieldReduction", ".", "val", ",", "self", ".", "isec", ".", "val", ",", "1", "]", ",", "]", "]" ]
Return the Dynac representation of this cavity instance.
[ "Return", "the", "Dynac", "representation", "of", "this", "cavity", "instance", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L131-L138
240,266
se-esss-litterbox/Pynac
Pynac/Elements.py
AccGap.from_dynacRepr
def from_dynacRepr(cls, pynacRepr): """ Construct a ``AccGap`` instance from the Pynac lattice element """ pynacList = pynacRepr[1][0] L = float(pynacList[3]) TTF = float(pynacList[4]) TTFprime = float(pynacList[5]) TTFprimeprime = float(pynacList[13]) EField = float(pynacList[10]) phase = float(pynacList[11]) F = float(pynacList[14]) atten = float(pynacList[15]) gap = cls(L, TTF, TTFprime, TTFprimeprime, EField, phase, F, atten) gap.gapID = Param(val = int(pynacList[0]), unit = None) gap.energy = Param(val = float(pynacList[1]), unit = 'MeV') gap.beta = Param(val = float(pynacList[2]), unit = None) gap.S = Param(val = float(pynacList[6]), unit = None) gap.SP = Param(val = float(pynacList[7]), unit = None) gap.quadLength = Param(val = float(pynacList[8]), unit = 'cm') gap.quadStrength = Param(val = float(pynacList[9]), unit = 'kG/cm') gap.accumLen = Param(val = float(pynacList[12]), unit = 'cm') return gap
python
def from_dynacRepr(cls, pynacRepr): """ Construct an ``AccGap`` instance from the Pynac lattice element """ pynacList = pynacRepr[1][0] L = float(pynacList[3]) TTF = float(pynacList[4]) TTFprime = float(pynacList[5]) TTFprimeprime = float(pynacList[13]) EField = float(pynacList[10]) phase = float(pynacList[11]) F = float(pynacList[14]) atten = float(pynacList[15]) gap = cls(L, TTF, TTFprime, TTFprimeprime, EField, phase, F, atten) gap.gapID = Param(val = int(pynacList[0]), unit = None) gap.energy = Param(val = float(pynacList[1]), unit = 'MeV') gap.beta = Param(val = float(pynacList[2]), unit = None) gap.S = Param(val = float(pynacList[6]), unit = None) gap.SP = Param(val = float(pynacList[7]), unit = None) gap.quadLength = Param(val = float(pynacList[8]), unit = 'cm') gap.quadStrength = Param(val = float(pynacList[9]), unit = 'kG/cm') gap.accumLen = Param(val = float(pynacList[12]), unit = 'cm') return gap
[ "def", "from_dynacRepr", "(", "cls", ",", "pynacRepr", ")", ":", "pynacList", "=", "pynacRepr", "[", "1", "]", "[", "0", "]", "L", "=", "float", "(", "pynacList", "[", "3", "]", ")", "TTF", "=", "float", "(", "pynacList", "[", "4", "]", ")", "TTFprime", "=", "float", "(", "pynacList", "[", "5", "]", ")", "TTFprimeprime", "=", "float", "(", "pynacList", "[", "13", "]", ")", "EField", "=", "float", "(", "pynacList", "[", "10", "]", ")", "phase", "=", "float", "(", "pynacList", "[", "11", "]", ")", "F", "=", "float", "(", "pynacList", "[", "14", "]", ")", "atten", "=", "float", "(", "pynacList", "[", "15", "]", ")", "gap", "=", "cls", "(", "L", ",", "TTF", ",", "TTFprime", ",", "TTFprimeprime", ",", "EField", ",", "phase", ",", "F", ",", "atten", ")", "gap", ".", "gapID", "=", "Param", "(", "val", "=", "int", "(", "pynacList", "[", "0", "]", ")", ",", "unit", "=", "None", ")", "gap", ".", "energy", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "1", "]", ")", ",", "unit", "=", "'MeV'", ")", "gap", ".", "beta", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "2", "]", ")", ",", "unit", "=", "None", ")", "gap", ".", "S", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "6", "]", ")", ",", "unit", "=", "None", ")", "gap", ".", "SP", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "7", "]", ")", ",", "unit", "=", "None", ")", "gap", ".", "quadLength", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "8", "]", ")", ",", "unit", "=", "'cm'", ")", "gap", ".", "quadStrength", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "9", "]", ")", ",", "unit", "=", "'kG/cm'", ")", "gap", ".", "accumLen", "=", "Param", "(", "val", "=", "float", "(", "pynacList", "[", "12", "]", ")", ",", "unit", "=", "'cm'", ")", "return", "gap" ]
Construct an ``AccGap`` instance from the Pynac lattice element
[ "Construct", "a", "AccGap", "instance", "from", "the", "Pynac", "lattice", "element" ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L207-L232
240,267
se-esss-litterbox/Pynac
Pynac/Elements.py
AccGap.dynacRepresentation
def dynacRepresentation(self): """ Return the Dynac representation of this accelerating gap instance. """ details = [ self.gapID.val, self.energy.val, self.beta.val, self.L.val, self.TTF.val, self.TTFprime.val, self.S.val, self.SP.val, self.quadLength.val, self.quadStrength.val, self.EField.val, self.phase.val, self.accumLen.val, self.TTFprimeprime.val, self.F.val, self.atten.val, ] return ['CAVSC', [details]]
python
def dynacRepresentation(self): """ Return the Dynac representation of this accelerating gap instance. """ details = [ self.gapID.val, self.energy.val, self.beta.val, self.L.val, self.TTF.val, self.TTFprime.val, self.S.val, self.SP.val, self.quadLength.val, self.quadStrength.val, self.EField.val, self.phase.val, self.accumLen.val, self.TTFprimeprime.val, self.F.val, self.atten.val, ] return ['CAVSC', [details]]
[ "def", "dynacRepresentation", "(", "self", ")", ":", "details", "=", "[", "self", ".", "gapID", ".", "val", ",", "self", ".", "energy", ".", "val", ",", "self", ".", "beta", ".", "val", ",", "self", ".", "L", ".", "val", ",", "self", ".", "TTF", ".", "val", ",", "self", ".", "TTFprime", ".", "val", ",", "self", ".", "S", ".", "val", ",", "self", ".", "SP", ".", "val", ",", "self", ".", "quadLength", ".", "val", ",", "self", ".", "quadStrength", ".", "val", ",", "self", ".", "EField", ".", "val", ",", "self", ".", "phase", ".", "val", ",", "self", ".", "accumLen", ".", "val", ",", "self", ".", "TTFprimeprime", ".", "val", ",", "self", ".", "F", ".", "val", ",", "self", ".", "atten", ".", "val", ",", "]", "return", "[", "'CAVSC'", ",", "[", "details", "]", "]" ]
Return the Dynac representation of this accelerating gap instance.
[ "Return", "the", "Dynac", "representation", "of", "this", "accelerating", "gap", "instance", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L234-L256
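The from_dynacRepr and dynacRepresentation methods above are inverses over the 16-element CAVSC parameter list (index 10 is the field, 11 the phase, and so on). A hedged round-trip sketch, assuming Pynac.Elements imports as in the repo and using made-up numbers:

# Hypothetical round trip through the two AccGap methods above
# (values are invented; assumes the Pynac package layout shown in the urls).
from Pynac.Elements import AccGap

cavsc = ['CAVSC', [[1, 3.6, 0.0876, 4.0, 0.85, 0.01, 0.0, 0.0,
                    0.0, 0.0, 1.2, -30.0, 4.0, 0.001, 352.21, 0.0]]]
gap = AccGap.from_dynacRepr(cavsc)
assert gap.dynacRepresentation()[0] == 'CAVSC'
assert gap.phase.val == -30.0    # pynacList[11]
assert gap.EField.val == 1.2     # pynacList[10]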
240,268
se-esss-litterbox/Pynac
Pynac/Elements.py
Set4DAperture.from_dynacRepr
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Set4DAperture`` instance from the Pynac lattice element """ energyDefnFlag = int(pynacRepr[1][0][0]) energy = float(pynacRepr[1][0][1]) phase = float(pynacRepr[1][0][2]) x = float(pynacRepr[1][0][3]) y = float(pynacRepr[1][0][4]) radius = float(pynacRepr[1][0][5]) return cls(energy, phase, x, y, radius, energyDefnFlag)
python
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Set4DAperture`` instance from the Pynac lattice element """ energyDefnFlag = int(pynacRepr[1][0][0]) energy = float(pynacRepr[1][0][1]) phase = float(pynacRepr[1][0][2]) x = float(pynacRepr[1][0][3]) y = float(pynacRepr[1][0][4]) radius = float(pynacRepr[1][0][5]) return cls(energy, phase, x, y, radius, energyDefnFlag)
[ "def", "from_dynacRepr", "(", "cls", ",", "pynacRepr", ")", ":", "energyDefnFlag", "=", "int", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "0", "]", ")", "energy", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "1", "]", ")", "phase", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "2", "]", ")", "x", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "3", "]", ")", "y", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "4", "]", ")", "radius", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "5", "]", ")", "return", "cls", "(", "energy", ",", "phase", ",", "x", ",", "y", ",", "radius", ",", "energyDefnFlag", ")" ]
Construct a ``Set4DAperture`` instance from the Pynac lattice element
[ "Construct", "a", "Set4DAperture", "instance", "from", "the", "Pynac", "lattice", "element" ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L281-L292
240,269
se-esss-litterbox/Pynac
Pynac/Elements.py
Steerer.from_dynacRepr
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Steerer`` instance from the Pynac lattice element """ f = float(pynacRepr[1][0][0]) p = 'HV'[int(pynacRepr[1][0][1])] return cls(f, p)
python
def from_dynacRepr(cls, pynacRepr): """ Construct a ``Steerer`` instance from the Pynac lattice element """ f = float(pynacRepr[1][0][0]) p = 'HV'[int(pynacRepr[1][0][1])] return cls(f, p)
[ "def", "from_dynacRepr", "(", "cls", ",", "pynacRepr", ")", ":", "f", "=", "float", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "0", "]", ")", "p", "=", "'HV'", "[", "int", "(", "pynacRepr", "[", "1", "]", "[", "0", "]", "[", "1", "]", ")", "]", "return", "cls", "(", "f", ",", "p", ")" ]
Construct a ``Steerer`` instance from the Pynac lattice element
[ "Construct", "a", "Steerer", "instance", "from", "the", "Pynac", "lattice", "element" ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L390-L396
240,270
se-esss-litterbox/Pynac
Pynac/Elements.py
Steerer.dynacRepresentation
def dynacRepresentation(self): """ Return the Dynac representation of this steerer instance. """ if self.plane.val == 'H': p = 0 elif self.plane.val == 'V': p = 1 return ['STEER', [[self.field_strength.val], [p]]]
python
def dynacRepresentation(self): """ Return the Dynac representation of this steerer instance. """ if self.plane.val == 'H': p = 0 elif self.plane.val == 'V': p = 1 return ['STEER', [[self.field_strength.val], [p]]]
[ "def", "dynacRepresentation", "(", "self", ")", ":", "if", "self", ".", "plane", ".", "val", "==", "'H'", ":", "p", "=", "0", "elif", "self", ".", "plane", ".", "val", "==", "'V'", ":", "p", "=", "1", "return", "[", "'STEER'", ",", "[", "[", "self", ".", "field_strength", ".", "val", "]", ",", "[", "p", "]", "]", "]" ]
Return the Dynac representation of this steerer instance.
[ "Return", "the", "Dynac", "representation", "of", "this", "steerer", "instance", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Elements.py#L398-L406
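The Steerer pair above encodes the plane as Dynac's 0/1 flag via 'HV'[int(...)] and decodes it back when emitting the STEER card. A round-trip sketch, under the assumption (implied by the methods) that the constructor stores its arguments as the field_strength and plane Params:

# Sketch of the Steerer plane encoding above: 'HV'[0] == 'H', 'HV'[1] == 'V'.
from Pynac.Elements import Steerer

st = Steerer.from_dynacRepr(['STEER', [[0.02, 1]]])
assert st.plane.val == 'V'
assert st.dynacRepresentation() == ['STEER', [[0.02], [1]]]
# Note: dynacRepresentation() leaves p unbound (NameError) if plane.val is
# ever anything other than 'H' or 'V'.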
240,271
Archived-Object/ligament
ligament/__init__.py
main
def main(): """ parse command line opts and run a skeleton file when called from the command line, ligament looks in the current working directory for a file called `skeleton.py`. Tasks specified from the command line are then executed in order, and if the -w flag was specified, ligament then watches the filesystem for changes to prompt task re-execution; """ options = None try: options, args = getopt.gnu_getopt( sys.argv[1:], "whqv", ["watch", "help", "query", "verbose"]) except getopt.GetoptError as e: print e print_helptext() exit(1) should_watch = False query_skeleton = False verbose = False for opt, arg in options: if opt == "--watch" or opt == '-w': should_watch = True elif opt == "--query" or opt == '-q': query_skeleton = True elif opt == "--help" or opt == '-h': print_helptext() exit(0) elif opt == "--verbose" or opt == '-v': verbose = True else: print "option '%s' not recognized" % opt print_helptext() exit(1) if verbose: helpers.set_verbosity(".*") if query_skeleton: print " ".join(ligament.query_skeleton("./skeleton.py")) helpers.set_verbosity() else: helpers.add_verbosity_groups("build_task") ligament.run_skeleton( "./skeleton.py", ["default"] if len(args) == 0 else args, watch=should_watch)
python
def main(): """ parse command line opts and run a skeleton file when called from the command line, ligament looks in the current working directory for a file called `skeleton.py`. Tasks specified from the command line are then executed in order, and if the -w flag was specified, ligament then watches the filesystem for changes to prompt task re-execution; """ options = None try: options, args = getopt.gnu_getopt( sys.argv[1:], "whqv", ["watch", "help", "query", "verbose"]) except getopt.GetoptError as e: print e print_helptext() exit(1) should_watch = False query_skeleton = False verbose = False for opt, arg in options: if opt == "--watch" or opt == '-w': should_watch = True elif opt == "--query" or opt == '-q': query_skeleton = True elif opt == "--help" or opt == '-h': print_helptext() exit(0) elif opt == "--verbose" or opt == '-v': verbose = True else: print "option '%s' not recognized" % opt print_helptext() exit(1) if verbose: helpers.set_verbosity(".*") if query_skeleton: print " ".join(ligament.query_skeleton("./skeleton.py")) helpers.set_verbosity() else: helpers.add_verbosity_groups("build_task") ligament.run_skeleton( "./skeleton.py", ["default"] if len(args) == 0 else args, watch=should_watch)
[ "def", "main", "(", ")", ":", "options", "=", "None", "try", ":", "options", ",", "args", "=", "getopt", ".", "gnu_getopt", "(", "sys", ".", "argv", "[", "1", ":", "]", ",", "\"whqv\"", ",", "[", "\"watch\"", ",", "\"help\"", ",", "\"query\"", ",", "\"verbose\"", "]", ")", "except", "getopt", ".", "GetoptError", "as", "e", ":", "print", "e", "print_helptext", "(", ")", "exit", "(", "1", ")", "should_watch", "=", "False", "query_skeleton", "=", "False", "verbose", "=", "False", "for", "opt", ",", "arg", "in", "options", ":", "if", "opt", "==", "\"--watch\"", "or", "opt", "==", "'-w'", ":", "should_watch", "=", "True", "elif", "opt", "==", "\"--query\"", "or", "opt", "==", "'-q'", ":", "query_skeleton", "=", "True", "elif", "opt", "==", "\"--help\"", "or", "opt", "==", "'-h'", ":", "print_helptext", "(", ")", "exit", "(", "0", ")", "elif", "opt", "==", "\"--verbose\"", "or", "opt", "==", "'-v'", ":", "verbose", "=", "True", "else", ":", "print", "\"option '%s' not recognized\"", "%", "opt", "print_helptext", "(", ")", "exit", "(", "1", ")", "if", "verbose", ":", "helpers", ".", "set_verbosity", "(", "\".*\"", ")", "if", "query_skeleton", ":", "print", "\" \"", ".", "join", "(", "ligament", ".", "query_skeleton", "(", "\"./skeleton.py\"", ")", ")", "helpers", ".", "set_verbosity", "(", ")", "else", ":", "helpers", ".", "add_verbosity_groups", "(", "\"build_task\"", ")", "ligament", ".", "run_skeleton", "(", "\"./skeleton.py\"", ",", "[", "\"default\"", "]", "if", "len", "(", "args", ")", "==", "0", "else", "args", ",", "watch", "=", "should_watch", ")" ]
parse command line opts and run a skeleton file when called from the command line, ligament looks in the current working directory for a file called `skeleton.py`. Tasks specified from the command line are then executed in order, and if the -w flag was specified, ligament then watches the filesystem for changes to prompt task re-execution;
[ "parse", "command", "line", "opts", "and", "run", "a", "skeleton", "file" ]
ff3d78130522676a20dc64086dc8a27b197cc20f
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/__init__.py#L26-L78
240,272
b3j0f/schema
b3j0f/schema/base.py
Schema._getter
def _getter(self, obj): """Called when the parent element tries to get this property value. :param obj: parent object. """ result = None if self._fget_ is not None: result = self._fget_(obj) if result is None: result = getattr(obj, self._attrname(), self._default_) # notify parent schema about returned value if isinstance(obj, Schema): obj._getvalue(self, result) return result
python
def _getter(self, obj): """Called when the parent element tries to get this property value. :param obj: parent object. """ result = None if self._fget_ is not None: result = self._fget_(obj) if result is None: result = getattr(obj, self._attrname(), self._default_) # notify parent schema about returned value if isinstance(obj, Schema): obj._getvalue(self, result) return result
[ "def", "_getter", "(", "self", ",", "obj", ")", ":", "result", "=", "None", "if", "self", ".", "_fget_", "is", "not", "None", ":", "result", "=", "self", ".", "_fget_", "(", "obj", ")", "if", "result", "is", "None", ":", "result", "=", "getattr", "(", "obj", ",", "self", ".", "_attrname", "(", ")", ",", "self", ".", "_default_", ")", "# notify parent schema about returned value", "if", "isinstance", "(", "obj", ",", "Schema", ")", ":", "obj", ".", "_getvalue", "(", "self", ",", "result", ")", "return", "result" ]
Called when the parent element tries to get this property value. :param obj: parent object.
[ "Called", "when", "the", "parent", "element", "tries", "to", "get", "this", "property", "value", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/base.py#L165-L182
240,273
b3j0f/schema
b3j0f/schema/base.py
Schema._setter
def _setter(self, obj, value): """Called when the parent element tries to set this property value. :param obj: parent object. :param value: new value to use. If lambda, updated with the lambda result. """ if isinstance(value, DynamicValue): # execute lambda values. fvalue = value() else: fvalue = value self._validate(data=fvalue, owner=obj) if self._fset_ is not None: self._fset_(obj, fvalue) else: setattr(obj, self._attrname(), value) # notify obj about the new value. if isinstance(obj, Schema): obj._setvalue(self, fvalue)
python
def _setter(self, obj, value): """Called when the parent element tries to set this property value. :param obj: parent object. :param value: new value to use. If lambda, updated with the lambda result. """ if isinstance(value, DynamicValue): # execute lambda values. fvalue = value() else: fvalue = value self._validate(data=fvalue, owner=obj) if self._fset_ is not None: self._fset_(obj, fvalue) else: setattr(obj, self._attrname(), value) # notify obj about the new value. if isinstance(obj, Schema): obj._setvalue(self, fvalue)
[ "def", "_setter", "(", "self", ",", "obj", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "DynamicValue", ")", ":", "# execute lambda values.", "fvalue", "=", "value", "(", ")", "else", ":", "fvalue", "=", "value", "self", ".", "_validate", "(", "data", "=", "fvalue", ",", "owner", "=", "obj", ")", "if", "self", ".", "_fset_", "is", "not", "None", ":", "self", ".", "_fset_", "(", "obj", ",", "fvalue", ")", "else", ":", "setattr", "(", "obj", ",", "self", ".", "_attrname", "(", ")", ",", "value", ")", "# notify obj about the new value.", "if", "isinstance", "(", "obj", ",", "Schema", ")", ":", "obj", ".", "_setvalue", "(", "self", ",", "fvalue", ")" ]
Called when the parent element tries to set this property value. :param obj: parent object. :param value: new value to use. If lambda, updated with the lambda result.
[ "Called", "when", "the", "parent", "element", "tries", "to", "set", "this", "property", "value", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/base.py#L191-L214
240,274
b3j0f/schema
b3j0f/schema/base.py
Schema._deleter
def _deleter(self, obj): """Called when the parent element tries to delete this property value. :param obj: parent object. """ if self._fdel_ is not None: self._fdel_(obj) else: delattr(obj, self._attrname()) # notify parent schema about value deletion. if isinstance(obj, Schema): obj._delvalue(self)
python
def _deleter(self, obj): """Called when the parent element tries to delete this property value. :param obj: parent object. """ if self._fdel_ is not None: self._fdel_(obj) else: delattr(obj, self._attrname()) # notify parent schema about value deletion. if isinstance(obj, Schema): obj._delvalue(self)
[ "def", "_deleter", "(", "self", ",", "obj", ")", ":", "if", "self", ".", "_fdel_", "is", "not", "None", ":", "self", ".", "_fdel_", "(", "obj", ")", "else", ":", "delattr", "(", "obj", ",", "self", ".", "_attrname", "(", ")", ")", "# notify parent schema about value deletion.", "if", "isinstance", "(", "obj", ",", "Schema", ")", ":", "obj", ".", "_delvalue", "(", "self", ")" ]
Called when the parent element tries to delete this property value. :param obj: parent object.
[ "Called", "when", "the", "parent", "element", "tries", "to", "delete", "this", "property", "value", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/base.py#L223-L236
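The _getter/_setter/_deleter trio above is the hook side of a descriptor: an optional fget/fset/fdel override, fallback storage under a private attribute name, and a default value. A self-contained sketch of that pattern; the names here are illustrative, not the b3j0f.schema API:

# Standalone sketch of the getter/setter pattern used above.
class Prop(object):
    def __init__(self, default=None, fget=None, fset=None):
        self._default_, self._fget_, self._fset_ = default, fget, fset
        self._name = '_prop_%d' % id(self)   # per-descriptor storage slot

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        result = self._fget_(obj) if self._fget_ else None
        if result is None:
            result = getattr(obj, self._name, self._default_)
        return result

    def __set__(self, obj, value):
        if self._fset_:
            self._fset_(obj, value)
        else:
            setattr(obj, self._name, value)

class Thing(object):
    size = Prop(default=0)

t = Thing()
assert t.size == 0
t.size = 5
assert t.size == 5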
240,275
b3j0f/schema
b3j0f/schema/base.py
Schema._validate
def _validate(self, data, owner=None): """Validate input data and raise an exception if validation fails. :param data: data to validate with this schema. :param Schema owner: schema owner. :raises: Exception if the data is not validated. """ if isinstance(data, DynamicValue): data = data() if data is None and not self.nullable: raise ValueError('Value can not be null') elif data is not None: isdict = isinstance(data, dict) for name, schema in iteritems(self.getschemas()): if name == 'default': continue if name in self.required: if ( (isdict and name not in data) or (not isdict and not hasattr(data, name)) ): part1 = ( 'Mandatory property {0} by {1} is missing in {2}.'. format(name, self, data) ) part2 = '{0} expected.'.format(schema) error = '{0} {1}'.format(part1, part2) raise ValueError(error) elif (isdict and name in data) or hasattr(data, name): value = data[name] if isdict else getattr(data, name) schema._validate(data=value, owner=self)
python
def _validate(self, data, owner=None): """Validate input data and raise an exception if validation fails. :param data: data to validate with this schema. :param Schema owner: schema owner. :raises: Exception if the data is not validated. """ if isinstance(data, DynamicValue): data = data() if data is None and not self.nullable: raise ValueError('Value can not be null') elif data is not None: isdict = isinstance(data, dict) for name, schema in iteritems(self.getschemas()): if name == 'default': continue if name in self.required: if ( (isdict and name not in data) or (not isdict and not hasattr(data, name)) ): part1 = ( 'Mandatory property {0} by {1} is missing in {2}.'. format(name, self, data) ) part2 = '{0} expected.'.format(schema) error = '{0} {1}'.format(part1, part2) raise ValueError(error) elif (isdict and name in data) or hasattr(data, name): value = data[name] if isdict else getattr(data, name) schema._validate(data=value, owner=self)
[ "def", "_validate", "(", "self", ",", "data", ",", "owner", "=", "None", ")", ":", "if", "isinstance", "(", "data", ",", "DynamicValue", ")", ":", "data", "=", "data", "(", ")", "if", "data", "is", "None", "and", "not", "self", ".", "nullable", ":", "raise", "ValueError", "(", "'Value can not be null'", ")", "elif", "data", "is", "not", "None", ":", "isdict", "=", "isinstance", "(", "data", ",", "dict", ")", "for", "name", ",", "schema", "in", "iteritems", "(", "self", ".", "getschemas", "(", ")", ")", ":", "if", "name", "==", "'default'", ":", "continue", "if", "name", "in", "self", ".", "required", ":", "if", "(", "(", "isdict", "and", "name", "not", "in", "data", ")", "or", "(", "not", "isdict", "and", "not", "hasattr", "(", "data", ",", "name", ")", ")", ")", ":", "part1", "=", "(", "'Mandatory property {0} by {1} is missing in {2}.'", ".", "format", "(", "name", ",", "self", ",", "data", ")", ")", "part2", "=", "'{0} expected.'", ".", "format", "(", "schema", ")", "error", "=", "'{0} {1}'", ".", "format", "(", "part1", ",", "part2", ")", "raise", "ValueError", "(", "error", ")", "elif", "(", "isdict", "and", "name", "in", "data", ")", "or", "hasattr", "(", "data", ",", "name", ")", ":", "value", "=", "data", "[", "name", "]", "if", "isdict", "else", "getattr", "(", "data", ",", "name", ")", "schema", ".", "_validate", "(", "data", "=", "value", ",", "owner", "=", "self", ")" ]
Validate input data and raise an exception if validation fails. :param data: data to validate with this schema. :param Schema owner: schema owner. :raises: Exception if the data is not validated.
[ "Validate", "input", "data", "in", "returning", "an", "empty", "list", "if", "true", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/base.py#L244-L283
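The required-name check inside _validate treats dicts and plain objects uniformly: membership with `in` for dicts, hasattr otherwise. A standalone sketch of just that rule:

# Standalone sketch of the required-name check above (illustrative only).
def missing_required(data, required):
    isdict = isinstance(data, dict)
    return [name for name in required
            if (isdict and name not in data)
            or (not isdict and not hasattr(data, name))]

assert missing_required({'a': 1}, ['a', 'b']) == ['b']
class Obj(object):
    a = 1
assert missing_required(Obj(), ['a', 'b']) == ['b']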
240,276
b3j0f/schema
b3j0f/schema/base.py
Schema.getschemas
def getschemas(cls): """Get inner schemas by name. :return: ordered dict by name. :rtype: OrderedDict """ members = getmembers(cls, lambda member: isinstance(member, Schema)) result = OrderedDict() for name, member in members: result[name] = member return result
python
def getschemas(cls): """Get inner schemas by name. :return: ordered dict by name. :rtype: OrderedDict """ members = getmembers(cls, lambda member: isinstance(member, Schema)) result = OrderedDict() for name, member in members: result[name] = member return result
[ "def", "getschemas", "(", "cls", ")", ":", "members", "=", "getmembers", "(", "cls", ",", "lambda", "member", ":", "isinstance", "(", "member", ",", "Schema", ")", ")", "result", "=", "OrderedDict", "(", ")", "for", "name", ",", "member", "in", "members", ":", "result", "[", "name", "]", "=", "member", "return", "result" ]
Get inner schemas by name. :return: ordered dict by name. :rtype: OrderedDict
[ "Get", "inner", "schemas", "by", "name", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/base.py#L286-L299
240,277
JonLiuFYI/pkdx
pkdx/pkdx/main.py
get_ability_desc
def get_ability_desc(ability): """Return the description matching the given ability name. Check abilities.json in the same directory.""" srcpath = path.dirname(__file__) try: f = open(path.join(srcpath, 'abilities.json'), 'r') except IOError: get_abilities() f = open(path.join(srcpath, 'abilities.json'), 'r') finally: with f: return json.load(f)[ability].encode('utf-8')
python
def get_ability_desc(ability): """Return the description matching the given ability name. Check abilities.json in the same directory.""" srcpath = path.dirname(__file__) try: f = open(path.join(srcpath, 'abilities.json'), 'r') except IOError: get_abilities() f = open(path.join(srcpath, 'abilities.json'), 'r') finally: with f: return json.load(f)[ability].encode('utf-8')
[ "def", "get_ability_desc", "(", "ability", ")", ":", "srcpath", "=", "path", ".", "dirname", "(", "__file__", ")", "try", ":", "f", "=", "open", "(", "path", ".", "join", "(", "srcpath", ",", "'abilities.json'", ")", ",", "'r'", ")", "except", "IOError", ":", "get_abilities", "(", ")", "f", "=", "open", "(", "path", ".", "join", "(", "srcpath", ",", "'abilities.json'", ")", ",", "'r'", ")", "finally", ":", "with", "f", ":", "return", "json", ".", "load", "(", "f", ")", "[", "ability", "]", ".", "encode", "(", "'utf-8'", ")" ]
Return the description matching the given ability name. Check abilities.json in the same directory.
[ "Return", "the", "description", "matching", "the", "given", "ability", "name", ".", "Check", "abilities", ".", "json", "in", "the", "same", "directory", "." ]
269e9814df074e0df25972fad04539a644d73a3c
https://github.com/JonLiuFYI/pkdx/blob/269e9814df074e0df25972fad04539a644d73a3c/pkdx/pkdx/main.py#L47-L57
240,278
JonLiuFYI/pkdx
pkdx/pkdx/main.py
get_move_data
def get_move_data(move): """Return the index number for the given move name. Check moves.json in the same directory.""" srcpath = path.dirname(__file__) try: f = open(path.join(srcpath, 'moves.json'), 'r') except IOError: get_moves() f = open(path.join(srcpath, 'moves.json'), 'r') finally: with f: return json.load(f)[move]
python
def get_move_data(move): """Return the index number for the given move name. Check moves.json in the same directory.""" srcpath = path.dirname(__file__) try: f = open(path.join(srcpath, 'moves.json'), 'r') except IOError: get_moves() f = open(path.join(srcpath, 'moves.json'), 'r') finally: with f: return json.load(f)[move]
[ "def", "get_move_data", "(", "move", ")", ":", "srcpath", "=", "path", ".", "dirname", "(", "__file__", ")", "try", ":", "f", "=", "open", "(", "path", ".", "join", "(", "srcpath", ",", "'moves.json'", ")", ",", "'r'", ")", "except", "IOError", ":", "get_moves", "(", ")", "f", "=", "open", "(", "path", ".", "join", "(", "srcpath", ",", "'moves.json'", ")", ",", "'r'", ")", "finally", ":", "with", "f", ":", "return", "json", ".", "load", "(", "f", ")", "[", "move", "]" ]
Return the index number for the given move name. Check moves.json in the same directory.
[ "Return", "the", "index", "number", "for", "the", "given", "move", "name", ".", "Check", "moves", ".", "json", "in", "the", "same", "directory", "." ]
269e9814df074e0df25972fad04539a644d73a3c
https://github.com/JonLiuFYI/pkdx/blob/269e9814df074e0df25972fad04539a644d73a3c/pkdx/pkdx/main.py#L60-L70
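get_ability_desc and get_move_data above share a load-or-regenerate pattern: open the cached JSON, rebuild it on IOError, then re-open. Note the `finally: with f:` shape leaves f unbound if the retry itself fails, turning the original error into a NameError. A hedged variant of the same pattern that avoids that hazard; `regenerate` stands in for get_abilities()/get_moves():

# Sketch of a safer spelling of the shared load-or-regenerate pattern above.
import json
from os import path

def load_json(filename, regenerate):
    fullpath = path.join(path.dirname(__file__), filename)
    try:
        f = open(fullpath, 'r')
    except IOError:
        regenerate()            # rebuild the cache file, then retry the open
        f = open(fullpath, 'r')
    with f:                     # reached only when an open succeeded
        return json.load(f)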
240,279
fr33jc/bang
bang/deployers/cloud.py
SSHKeyDeployer.register
def register(self): """Registers SSH key with provider.""" log.info('Installing ssh key, %s' % self.name) self.consul.create_ssh_pub_key(self.name, self.key)
python
def register(self): """Registers SSH key with provider.""" log.info('Installing ssh key, %s' % self.name) self.consul.create_ssh_pub_key(self.name, self.key)
[ "def", "register", "(", "self", ")", ":", "log", ".", "info", "(", "'Installing ssh key, %s'", "%", "self", ".", "name", ")", "self", ".", "consul", ".", "create_ssh_pub_key", "(", "self", ".", "name", ",", "self", ".", "key", ")" ]
Registers SSH key with provider.
[ "Registers", "SSH", "key", "with", "provider", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L61-L64
240,280
fr33jc/bang
bang/deployers/cloud.py
ServerDeployer.find_existing
def find_existing(self): """ Searches for existing server instances with matching tags. To match, the existing instances must also be "running". """ instances = self.consul.find_servers(self.tags) maxnames = len(instances) while instances: i = instances.pop(0) server_id = i[A.server.ID] if self.namespace.add_if_unique(server_id): log.info('Found existing server, %s' % server_id) self.server_attrs = i break if len(self.namespace.names) >= maxnames: break instances.append(i)
python
def find_existing(self): """ Searches for existing server instances with matching tags. To match, the existing instances must also be "running". """ instances = self.consul.find_servers(self.tags) maxnames = len(instances) while instances: i = instances.pop(0) server_id = i[A.server.ID] if self.namespace.add_if_unique(server_id): log.info('Found existing server, %s' % server_id) self.server_attrs = i break if len(self.namespace.names) >= maxnames: break instances.append(i)
[ "def", "find_existing", "(", "self", ")", ":", "instances", "=", "self", ".", "consul", ".", "find_servers", "(", "self", ".", "tags", ")", "maxnames", "=", "len", "(", "instances", ")", "while", "instances", ":", "i", "=", "instances", ".", "pop", "(", "0", ")", "server_id", "=", "i", "[", "A", ".", "server", ".", "ID", "]", "if", "self", ".", "namespace", ".", "add_if_unique", "(", "server_id", ")", ":", "log", ".", "info", "(", "'Found existing server, %s'", "%", "server_id", ")", "self", ".", "server_attrs", "=", "i", "break", "if", "len", "(", "self", ".", "namespace", ".", "names", ")", ">=", "maxnames", ":", "break", "instances", ".", "append", "(", "i", ")" ]
Searches for existing server instances with matching tags. To match, the existing instances must also be "running".
[ "Searches", "for", "existing", "server", "instances", "with", "matching", "tags", ".", "To", "match", "the", "existing", "instances", "must", "also", "be", "running", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L85-L102
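find_existing above rotates through the discovered instances (pop from the front, append unclaimed ones to the back) so that each deployer claims a distinct server ID via namespace.add_if_unique. A standalone sketch of the rotation, with a plain set standing in for the namespace:

# Standalone sketch of the claim-one-instance rotation above (illustrative).
def claim_instance(instances, taken):
    maxnames = len(instances)
    while instances:
        i = instances.pop(0)
        if i['id'] not in taken:        # stands in for namespace.add_if_unique
            taken.add(i['id'])
            return i
        if len(taken) >= maxnames:
            return None
        instances.append(i)             # rotate unclaimed instance to the back

taken = set()
pool = [{'id': 'a'}, {'id': 'b'}]
assert claim_instance(list(pool), taken)['id'] == 'a'
assert claim_instance(list(pool), taken)['id'] == 'b'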
240,281
fr33jc/bang
bang/deployers/cloud.py
ServerDeployer.wait_for_running
def wait_for_running(self): """Waits for found servers to be operational""" self.server_attrs = self.consul.find_running( self.server_attrs, self.launch_timeout_s, )
python
def wait_for_running(self): """Waits for found servers to be operational""" self.server_attrs = self.consul.find_running( self.server_attrs, self.launch_timeout_s, )
[ "def", "wait_for_running", "(", "self", ")", ":", "self", ".", "server_attrs", "=", "self", ".", "consul", ".", "find_running", "(", "self", ".", "server_attrs", ",", "self", ".", "launch_timeout_s", ",", ")" ]
Waits for found servers to be operational
[ "Waits", "for", "found", "servers", "to", "be", "operational" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L104-L109
240,282
fr33jc/bang
bang/deployers/cloud.py
ServerDeployer.create
def create(self): """Launches a new server instance.""" self.server_attrs = self.consul.create_server( "%s-%s" % (self.stack.name, self.name), self.disk_image_id, self.instance_type, self.ssh_key_name, tags=self.tags, availability_zone=self.availability_zone, timeout_s=self.launch_timeout_s, security_groups=self.security_groups, **self.provider_extras ) log.debug('Post launch delay: %d s' % self.post_launch_delay_s) time.sleep(self.post_launch_delay_s)
python
def create(self): """Launches a new server instance.""" self.server_attrs = self.consul.create_server( "%s-%s" % (self.stack.name, self.name), self.disk_image_id, self.instance_type, self.ssh_key_name, tags=self.tags, availability_zone=self.availability_zone, timeout_s=self.launch_timeout_s, security_groups=self.security_groups, **self.provider_extras ) log.debug('Post launch delay: %d s' % self.post_launch_delay_s) time.sleep(self.post_launch_delay_s)
[ "def", "create", "(", "self", ")", ":", "self", ".", "server_attrs", "=", "self", ".", "consul", ".", "create_server", "(", "\"%s-%s\"", "%", "(", "self", ".", "stack", ".", "name", ",", "self", ".", "name", ")", ",", "self", ".", "disk_image_id", ",", "self", ".", "instance_type", ",", "self", ".", "ssh_key_name", ",", "tags", "=", "self", ".", "tags", ",", "availability_zone", "=", "self", ".", "availability_zone", ",", "timeout_s", "=", "self", ".", "launch_timeout_s", ",", "security_groups", "=", "self", ".", "security_groups", ",", "*", "*", "self", ".", "provider_extras", ")", "log", ".", "debug", "(", "'Post launch delay: %d s'", "%", "self", ".", "post_launch_delay_s", ")", "time", ".", "sleep", "(", "self", ".", "post_launch_delay_s", ")" ]
Launches a new server instance.
[ "Launches", "a", "new", "server", "instance", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L111-L125
240,283
fr33jc/bang
bang/deployers/cloud.py
ServerDeployer.add_to_inventory
def add_to_inventory(self): """Adds host to stack inventory""" if not self.server_attrs: return for addy in self.server_attrs[A.server.PUBLIC_IPS]: self.stack.add_host(addy, self.groups, self.hostvars)
python
def add_to_inventory(self): """Adds host to stack inventory""" if not self.server_attrs: return for addy in self.server_attrs[A.server.PUBLIC_IPS]: self.stack.add_host(addy, self.groups, self.hostvars)
[ "def", "add_to_inventory", "(", "self", ")", ":", "if", "not", "self", ".", "server_attrs", ":", "return", "for", "addy", "in", "self", ".", "server_attrs", "[", "A", ".", "server", ".", "PUBLIC_IPS", "]", ":", "self", ".", "stack", ".", "add_host", "(", "addy", ",", "self", ".", "groups", ",", "self", ".", "hostvars", ")" ]
Adds host to stack inventory
[ "Adds", "host", "to", "stack", "inventory" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L127-L132
240,284
fr33jc/bang
bang/deployers/cloud.py
CloudManagerServerDeployer.define
def define(self): """Defines a new server.""" self.server_def = self.consul.define_server( self.name, self.server_tpl, self.server_tpl_rev, self.instance_type, self.ssh_key_name, tags=self.tags, availability_zone=self.availability_zone, security_groups=self.security_groups, **self.provider_extras ) log.debug('Defined server %s' % self.server_def)
python
def define(self): """Defines a new server.""" self.server_def = self.consul.define_server( self.name, self.server_tpl, self.server_tpl_rev, self.instance_type, self.ssh_key_name, tags=self.tags, availability_zone=self.availability_zone, security_groups=self.security_groups, **self.provider_extras ) log.debug('Defined server %s' % self.server_def)
[ "def", "define", "(", "self", ")", ":", "self", ".", "server_def", "=", "self", ".", "consul", ".", "define_server", "(", "self", ".", "name", ",", "self", ".", "server_tpl", ",", "self", ".", "server_tpl_rev", ",", "self", ".", "instance_type", ",", "self", ".", "ssh_key_name", ",", "tags", "=", "self", ".", "tags", ",", "availability_zone", "=", "self", ".", "availability_zone", ",", "security_groups", "=", "self", ".", "security_groups", ",", "*", "*", "self", ".", "provider_extras", ")", "log", ".", "debug", "(", "'Defined server %s'", "%", "self", ".", "server_def", ")" ]
Defines a new server.
[ "Defines", "a", "new", "server", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L176-L189
240,285
fr33jc/bang
bang/deployers/cloud.py
SecurityGroupRulesetDeployer.find_existing
def find_existing(self): """ Finds existing rules in the secgroup. Populates ``self.create_these_rules`` and ``self.delete_these_rules``. """ sg = self.consul.find_secgroup(self.name) current = sg.rules log.debug('Current rules: %s' % current) log.debug('Intended rules: %s' % self.rules) exp_rules = [] for rule in self.rules: exp = ( rule[A.secgroup.PROTOCOL], rule[A.secgroup.FROM], rule[A.secgroup.TO], rule[A.secgroup.SOURCE], ) exp_rules.append(exp) if exp in current: del current[exp] else: self.create_these_rules.append(exp) self.delete_these_rules.extend(current.itervalues()) log.debug('Create these rules: %s' % self.create_these_rules) log.debug('Delete these rules: %s' % self.delete_these_rules)
python
def find_existing(self): """ Finds existing rules in the secgroup. Populates ``self.create_these_rules`` and ``self.delete_these_rules``. """ sg = self.consul.find_secgroup(self.name) current = sg.rules log.debug('Current rules: %s' % current) log.debug('Intended rules: %s' % self.rules) exp_rules = [] for rule in self.rules: exp = ( rule[A.secgroup.PROTOCOL], rule[A.secgroup.FROM], rule[A.secgroup.TO], rule[A.secgroup.SOURCE], ) exp_rules.append(exp) if exp in current: del current[exp] else: self.create_these_rules.append(exp) self.delete_these_rules.extend(current.itervalues()) log.debug('Create these rules: %s' % self.create_these_rules) log.debug('Delete these rules: %s' % self.delete_these_rules)
[ "def", "find_existing", "(", "self", ")", ":", "sg", "=", "self", ".", "consul", ".", "find_secgroup", "(", "self", ".", "name", ")", "current", "=", "sg", ".", "rules", "log", ".", "debug", "(", "'Current rules: %s'", "%", "current", ")", "log", ".", "debug", "(", "'Intended rules: %s'", "%", "self", ".", "rules", ")", "exp_rules", "=", "[", "]", "for", "rule", "in", "self", ".", "rules", ":", "exp", "=", "(", "rule", "[", "A", ".", "secgroup", ".", "PROTOCOL", "]", ",", "rule", "[", "A", ".", "secgroup", ".", "FROM", "]", ",", "rule", "[", "A", ".", "secgroup", ".", "TO", "]", ",", "rule", "[", "A", ".", "secgroup", ".", "SOURCE", "]", ",", ")", "exp_rules", ".", "append", "(", "exp", ")", "if", "exp", "in", "current", ":", "del", "current", "[", "exp", "]", "else", ":", "self", ".", "create_these_rules", ".", "append", "(", "exp", ")", "self", ".", "delete_these_rules", ".", "extend", "(", "current", ".", "itervalues", "(", ")", ")", "log", ".", "debug", "(", "'Create these rules: %s'", "%", "self", ".", "create_these_rules", ")", "log", ".", "debug", "(", "'Delete these rules: %s'", "%", "self", ".", "delete_these_rules", ")" ]
Finds existing rules in the secgroup. Populates ``self.create_these_rules`` and ``self.delete_these_rules``.
[ "Finds", "existing", "rule", "in", "secgroup", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L231-L260
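find_existing above normalizes each intended rule to a (protocol, from, to, source) tuple and diffs it against the current rules, which are keyed by the same tuple. A standalone sketch of that diff with made-up rule data:

# Standalone sketch of the rule diff computed above (illustrative).
def diff_rules(current, intended):
    current = dict(current)          # copy; matched entries are removed
    create = []
    for exp in intended:
        if exp in current:
            del current[exp]
        else:
            create.append(exp)
    return create, list(current.values())

current = {('tcp', 22, 22, '0.0.0.0/0'): 'ssh-rule'}
create, delete = diff_rules(current, [('tcp', 80, 80, '0.0.0.0/0')])
assert create == [('tcp', 80, 80, '0.0.0.0/0')]
assert delete == ['ssh-rule']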
240,286
fr33jc/bang
bang/deployers/cloud.py
SecurityGroupRulesetDeployer.apply_rule_changes
def apply_rule_changes(self): """ Makes the security group rules match what is defined in the Bang config file. """ # TODO: add error handling for rule in self.delete_these_rules: self.consul.delete_secgroup_rule(rule) log.info("Revoked: %s" % rule) for rule in self.create_these_rules: args = rule + (self.name, ) self.consul.create_secgroup_rule(*args) log.info("Authorized: %s" % str(rule))
python
def apply_rule_changes(self): """ Makes the security group rules match what is defined in the Bang config file. """ # TODO: add error handling for rule in self.delete_these_rules: self.consul.delete_secgroup_rule(rule) log.info("Revoked: %s" % rule) for rule in self.create_these_rules: args = rule + (self.name, ) self.consul.create_secgroup_rule(*args) log.info("Authorized: %s" % str(rule))
[ "def", "apply_rule_changes", "(", "self", ")", ":", "# TODO: add error handling", "for", "rule", "in", "self", ".", "delete_these_rules", ":", "self", ".", "consul", ".", "delete_secgroup_rule", "(", "rule", ")", "log", ".", "info", "(", "\"Revoked: %s\"", "%", "rule", ")", "for", "rule", "in", "self", ".", "create_these_rules", ":", "args", "=", "rule", "+", "(", "self", ".", "name", ",", ")", "self", ".", "consul", ".", "create_secgroup_rule", "(", "*", "args", ")", "log", ".", "info", "(", "\"Authorized: %s\"", "%", "str", "(", "rule", ")", ")" ]
Makes the security group rules match what is defined in the Bang config file.
[ "Makes", "the", "security", "group", "rules", "match", "what", "is", "defined", "in", "the", "Bang", "config", "file", "." ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L262-L275
240,287
fr33jc/bang
bang/deployers/cloud.py
BucketDeployer.create
def create(self): """Creates a new bucket""" self.consul.create_bucket("%s-%s" % (self.stack.name, self.name))
python
def create(self): """Creates a new bucket""" self.consul.create_bucket("%s-%s" % (self.stack.name, self.name))
[ "def", "create", "(", "self", ")", ":", "self", ".", "consul", ".", "create_bucket", "(", "\"%s-%s\"", "%", "(", "self", ".", "stack", ".", "name", ",", "self", ".", "name", ")", ")" ]
Creates a new bucket
[ "Creates", "a", "new", "bucket" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L285-L287
240,288
fr33jc/bang
bang/deployers/cloud.py
DatabaseDeployer.create
def create(self): """Creates a new database""" self.db_attrs = self.consul.create_db( self.instance_name, self.instance_type, self.admin_username, self.admin_password, db_name=self.db_name, storage_size_gb=self.storage_size, timeout_s=self.launch_timeout_s, )
python
def create(self): """Creates a new database""" self.db_attrs = self.consul.create_db( self.instance_name, self.instance_type, self.admin_username, self.admin_password, db_name=self.db_name, storage_size_gb=self.storage_size, timeout_s=self.launch_timeout_s, )
[ "def", "create", "(", "self", ")", ":", "self", ".", "db_attrs", "=", "self", ".", "consul", ".", "create_db", "(", "self", ".", "instance_name", ",", "self", ".", "instance_type", ",", "self", ".", "admin_username", ",", "self", ".", "admin_password", ",", "db_name", "=", "self", ".", "db_name", ",", "storage_size_gb", "=", "self", ".", "storage_size", ",", "timeout_s", "=", "self", ".", "launch_timeout_s", ",", ")" ]
Creates a new database
[ "Creates", "a", "new", "database" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L313-L323
240,289
fr33jc/bang
bang/deployers/cloud.py
DatabaseDeployer.add_to_inventory
def add_to_inventory(self): """Adds db host to stack inventory""" host = self.db_attrs.pop(A.database.HOST) self.stack.add_host( host, self.groups, self.db_attrs )
python
def add_to_inventory(self): """Adds db host to stack inventory""" host = self.db_attrs.pop(A.database.HOST) self.stack.add_host( host, self.groups, self.db_attrs )
[ "def", "add_to_inventory", "(", "self", ")", ":", "host", "=", "self", ".", "db_attrs", ".", "pop", "(", "A", ".", "database", ".", "HOST", ")", "self", ".", "stack", ".", "add_host", "(", "host", ",", "self", ".", "groups", ",", "self", ".", "db_attrs", ")" ]
Adds db host to stack inventory
[ "Adds", "db", "host", "to", "stack", "inventory" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L325-L332
240,290
fr33jc/bang
bang/deployers/cloud.py
LoadBalancerDeployer.create
def create(self): """Creates a new load balancer""" required_nodes = self._get_required_nodes() self.lb_attrs = self.consul.create_lb( self.instance_name, protocol=self.protocol, port=self.port, nodes=required_nodes, node_port=str(self.backend_port), algorithm=getattr(self, 'algorithm', None) )
python
def create(self): """Creates a new load balancer""" required_nodes = self._get_required_nodes() self.lb_attrs = self.consul.create_lb( self.instance_name, protocol=self.protocol, port=self.port, nodes=required_nodes, node_port=str(self.backend_port), algorithm=getattr(self, 'algorithm', None) )
[ "def", "create", "(", "self", ")", ":", "required_nodes", "=", "self", ".", "_get_required_nodes", "(", ")", "self", ".", "lb_attrs", "=", "self", ".", "consul", ".", "create_lb", "(", "self", ".", "instance_name", ",", "protocol", "=", "self", ".", "protocol", ",", "port", "=", "self", ".", "port", ",", "nodes", "=", "required_nodes", ",", "node_port", "=", "str", "(", "self", ".", "backend_port", ")", ",", "algorithm", "=", "getattr", "(", "self", ",", "'algorithm'", ",", "None", ")", ")" ]
Creates a new load balancer
[ "Creates", "a", "new", "load", "balancer" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L382-L392
240,291
fr33jc/bang
bang/deployers/cloud.py
LoadBalancerDeployer.configure_nodes
def configure_nodes(self): """Ensure that the LB's nodes match the stack""" # Since load balancing runs after server provisioning, # the servers should already be created regardless of # whether this was a preexisting load balancer or not. # We also have the existing nodes, because add_to_inventory # has been called already required_nodes = self._get_required_nodes() log.debug( "Matching existing lb nodes to required %s (port %s)" % (", ".join(required_nodes), self.backend_port) ) self.consul.match_lb_nodes( self.lb_attrs[A.loadbalancer.ID], self.lb_attrs[A.loadbalancer.NODES_KEY], required_nodes, self.backend_port) self.lb_attrs = self.consul.lb_details( self.lb_attrs[A.loadbalancer.ID] )
python
def configure_nodes(self): """Ensure that the LB's nodes match the stack""" # Since load balancing runs after server provisioning, # the servers should already be created regardless of # whether this was a preexisting load balancer or not. # We also have the existing nodes, because add_to_inventory # has been called already required_nodes = self._get_required_nodes() log.debug( "Matching existing lb nodes to required %s (port %s)" % (", ".join(required_nodes), self.backend_port) ) self.consul.match_lb_nodes( self.lb_attrs[A.loadbalancer.ID], self.lb_attrs[A.loadbalancer.NODES_KEY], required_nodes, self.backend_port) self.lb_attrs = self.consul.lb_details( self.lb_attrs[A.loadbalancer.ID] )
[ "def", "configure_nodes", "(", "self", ")", ":", "# Since load balancing runs after server provisioning,", "# the servers should already be created regardless of", "# whether this was a preexisting load balancer or not.", "# We also have the existing nodes, because add_to_inventory", "# has been called already", "required_nodes", "=", "self", ".", "_get_required_nodes", "(", ")", "log", ".", "debug", "(", "\"Matching existing lb nodes to required %s (port %s)\"", "%", "(", "\", \"", ".", "join", "(", "required_nodes", ")", ",", "self", ".", "backend_port", ")", ")", "self", ".", "consul", ".", "match_lb_nodes", "(", "self", ".", "lb_attrs", "[", "A", ".", "loadbalancer", ".", "ID", "]", ",", "self", ".", "lb_attrs", "[", "A", ".", "loadbalancer", ".", "NODES_KEY", "]", ",", "required_nodes", ",", "self", ".", "backend_port", ")", "self", ".", "lb_attrs", "=", "self", ".", "consul", ".", "lb_details", "(", "self", ".", "lb_attrs", "[", "A", ".", "loadbalancer", ".", "ID", "]", ")" ]
Ensure that the LB's nodes match the stack
[ "Ensure", "that", "the", "LB", "s", "nodes", "matches", "the", "stack" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L394-L416
240,292
fr33jc/bang
bang/deployers/cloud.py
LoadBalancerDeployer.add_to_inventory
def add_to_inventory(self): """Adds lb IPs to stack inventory""" if self.lb_attrs: self.lb_attrs = self.consul.lb_details( self.lb_attrs[A.loadbalancer.ID] ) host = self.lb_attrs['virtualIps'][0]['address'] self.stack.add_lb_secgroup(self.name, [host], self.backend_port) self.stack.add_host( host, [self.name], self.lb_attrs )
python
def add_to_inventory(self): """Adds lb IPs to stack inventory""" if self.lb_attrs: self.lb_attrs = self.consul.lb_details( self.lb_attrs[A.loadbalancer.ID] ) host = self.lb_attrs['virtualIps'][0]['address'] self.stack.add_lb_secgroup(self.name, [host], self.backend_port) self.stack.add_host( host, [self.name], self.lb_attrs )
[ "def", "add_to_inventory", "(", "self", ")", ":", "if", "self", ".", "lb_attrs", ":", "self", ".", "lb_attrs", "=", "self", ".", "consul", ".", "lb_details", "(", "self", ".", "lb_attrs", "[", "A", ".", "loadbalancer", ".", "ID", "]", ")", "host", "=", "self", ".", "lb_attrs", "[", "'virtualIps'", "]", "[", "0", "]", "[", "'address'", "]", "self", ".", "stack", ".", "add_lb_secgroup", "(", "self", ".", "name", ",", "[", "host", "]", ",", "self", ".", "backend_port", ")", "self", ".", "stack", ".", "add_host", "(", "host", ",", "[", "self", ".", "name", "]", ",", "self", ".", "lb_attrs", ")" ]
Adds lb IPs to stack inventory
[ "Adds", "lb", "IPs", "to", "stack", "inventory" ]
8f000713f88d2a9a8c1193b63ca10a6578560c16
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/deployers/cloud.py#L425-L437
240,293
Archived-Object/ligament
ligament/buildtarget.py
BuildTarget.resolve_dependencies
def resolve_dependencies(self): """ evaluate each of the data dependencies of this build target and return the resulting dict""" return dict( [((key, self.data_dependencies[key]) if type(self.data_dependencies[key]) != DeferredDependency else (key, self.data_dependencies[key].resolve())) for key in self.data_dependencies])
python
def resolve_dependencies(self): """ evaluate each of the data dependencies of this build target and return the resulting dict""" return dict( [((key, self.data_dependencies[key]) if type(self.data_dependencies[key]) != DeferredDependency else (key, self.data_dependencies[key].resolve())) for key in self.data_dependencies])
[ "def", "resolve_dependencies", "(", "self", ")", ":", "return", "dict", "(", "[", "(", "(", "key", ",", "self", ".", "data_dependencies", "[", "key", "]", ")", "if", "type", "(", "self", ".", "data_dependencies", "[", "key", "]", ")", "!=", "DeferredDependency", "else", "(", "key", ",", "self", ".", "data_dependencies", "[", "key", "]", ".", "resolve", "(", ")", ")", ")", "for", "key", "in", "self", ".", "data_dependencies", "]", ")" ]
evaluate each of the data dependencies of this build target and return the resulting dict
[ "evaluate", "each", "of", "the", "data", "dependencies", "of", "this", "build", "target", "returns", "the", "resulting", "dict" ]
ff3d78130522676a20dc64086dc8a27b197cc20f
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildtarget.py#L76-L83
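The comprehension above passes plain values through and resolves DeferredDependency values. An equivalent, flatter spelling as a sketch, assuming the same DeferredDependency type from ligament (isinstance is used here in place of the exact type() comparison):

# Sketch of a flatter spelling of resolve_dependencies above.
def resolve_dependencies(self):
    resolved = {}
    for key, dep in self.data_dependencies.items():
        resolved[key] = dep.resolve() if isinstance(dep, DeferredDependency) else dep
    return resolved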
240,294
Archived-Object/ligament
ligament/buildtarget.py
BuildTarget.resolve_and_build
def resolve_and_build(self): """ resolves the dependencies of this build target and builds it """ pdebug("resolving and building task '%s'" % self.name, groups=["build_task"]) indent_text(indent="++2") toret = self.build(**self.resolve_dependencies()) indent_text(indent="--2") return toret
python
def resolve_and_build(self): """ resolves the dependencies of this build target and builds it """ pdebug("resolving and building task '%s'" % self.name, groups=["build_task"]) indent_text(indent="++2") toret = self.build(**self.resolve_dependencies()) indent_text(indent="--2") return toret
[ "def", "resolve_and_build", "(", "self", ")", ":", "pdebug", "(", "\"resolving and building task '%s'\"", "%", "self", ".", "name", ",", "groups", "=", "[", "\"build_task\"", "]", ")", "indent_text", "(", "indent", "=", "\"++2\"", ")", "toret", "=", "self", ".", "build", "(", "*", "*", "self", ".", "resolve_dependencies", "(", ")", ")", "indent_text", "(", "indent", "=", "\"--2\"", ")", "return", "toret" ]
resolves the dependencies of this build target and builds it
[ "resolves", "the", "dependencies", "of", "this", "build", "target", "and", "builds", "it" ]
ff3d78130522676a20dc64086dc8a27b197cc20f
https://github.com/Archived-Object/ligament/blob/ff3d78130522676a20dc64086dc8a27b197cc20f/ligament/buildtarget.py#L85-L92
240,295
ironfroggy/django-better-cache
bettercache/middleware.py
BetterCacheMiddleware.process_response
def process_response(self, request, response): """ Sets the cache and deals with caching headers if needed """ if not self.should_cache(request, response): # We don't need to update the cache, just return return response response = self.patch_headers(response) self.set_cache(request, response) return response
python
def process_response(self, request, response): """ Sets the cache and deals with caching headers if needed """ if not self.should_cache(request, response): # We don't need to update the cache, just return return response response = self.patch_headers(response) self.set_cache(request, response) return response
[ "def", "process_response", "(", "self", ",", "request", ",", "response", ")", ":", "if", "not", "self", ".", "should_cache", "(", "request", ",", "response", ")", ":", "# We don't need to update the cache, just return", "return", "response", "response", "=", "self", ".", "patch_headers", "(", "response", ")", "self", ".", "set_cache", "(", "request", ",", "response", ")", "return", "response" ]
Sets the cache and deals with caching headers if needed
[ "Sets", "the", "cache", "and", "deals", "with", "caching", "headers", "if", "needed" ]
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/middleware.py#L56-L66
240,296
ironfroggy/django-better-cache
bettercache/objects.py
CacheModel.keys
def keys(self): """Create an ordered dict of the names and values of key fields.""" keys = OrderedDict() def order_key(_): (k, v) = _ cache_key = getattr(type(self), k) return cache_key.order items = [(k, getattr(type(self), k)) for k in dir(type(self)) ] items = [(k, v) for (k, v) in items if isinstance(v, Key) ] for k, v in sorted(items, key=order_key): keys[k] = getattr(self, k) return keys
python
def keys(self): """Create an ordered dict of the names and values of key fields.""" keys = OrderedDict() def order_key(_): (k, v) = _ cache_key = getattr(type(self), k) return cache_key.order items = [(k, getattr(type(self), k)) for k in dir(type(self)) ] items = [(k, v) for (k, v) in items if isinstance(v, Key) ] for k, v in sorted(items, key=order_key): keys[k] = getattr(self, k) return keys
[ "def", "keys", "(", "self", ")", ":", "keys", "=", "OrderedDict", "(", ")", "def", "order_key", "(", "_", ")", ":", "(", "k", ",", "v", ")", "=", "_", "cache_key", "=", "getattr", "(", "type", "(", "self", ")", ",", "k", ")", "return", "cache_key", ".", "order", "items", "=", "[", "(", "k", ",", "getattr", "(", "type", "(", "self", ")", ",", "k", ")", ")", "for", "k", "in", "dir", "(", "type", "(", "self", ")", ")", "]", "items", "=", "[", "(", "k", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "items", "if", "isinstance", "(", "v", ",", "Key", ")", "]", "for", "k", ",", "v", "in", "sorted", "(", "items", ",", "key", "=", "order_key", ")", ":", "keys", "[", "k", "]", "=", "getattr", "(", "self", ",", "k", ")", "return", "keys" ]
Create an ordered dict of the names and values of key fields.
[ "Create", "an", "ordered", "dict", "of", "the", "names", "and", "values", "of", "key", "fields", "." ]
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L69-L89
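keys() above recovers declaration order by sorting on each Key's order attribute, since attribute introspection returns names alphabetically. A standalone sketch of that creation-counter trick; the names are illustrative, not the bettercache API:

# Standalone sketch of the ordering trick behind CacheModel.keys above:
# each field records a global creation counter so declaration order can
# be recovered even though introspection is alphabetical.
import itertools
_counter = itertools.count()

class Key(object):
    def __init__(self):
        self.order = next(_counter)

class Model(object):
    b = Key()
    a = Key()

fields = sorted(
    ((name, val) for name, val in vars(Model).items() if isinstance(val, Key)),
    key=lambda item: item[1].order,
)
assert [name for name, _ in fields] == ['b', 'a']   # declaration order, not alphabetical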
240,297
ironfroggy/django-better-cache
bettercache/objects.py
CacheModel.serialize
def serialize(self): """Serialize all the fields into one string.""" keys = self._all_keys() serdata = {} for fieldname, value in self._data.items(): serdata[fieldname] = getattr(type(self), fieldname).python_to_cache(value) return json.dumps(serdata)
python
def serialize(self): """Serialize all the fields into one string.""" keys = self._all_keys() serdata = {} for fieldname, value in self._data.items(): serdata[fieldname] = getattr(type(self), fieldname).python_to_cache(value) return json.dumps(serdata)
[ "def", "serialize", "(", "self", ")", ":", "keys", "=", "self", ".", "_all_keys", "(", ")", "serdata", "=", "{", "}", "for", "fieldname", ",", "value", "in", "self", ".", "_data", ".", "items", "(", ")", ":", "serdata", "[", "fieldname", "]", "=", "getattr", "(", "type", "(", "self", ")", ",", "fieldname", ")", ".", "python_to_cache", "(", "value", ")", "return", "json", ".", "dumps", "(", "serdata", ")" ]
Serialize all the fields into one string.
[ "Serialize", "all", "the", "fields", "into", "one", "string", "." ]
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L110-L117
240,298
ironfroggy/django-better-cache
bettercache/objects.py
CacheModel.deserialize
def deserialize(cls, string): """Reconstruct a previously serialized string back into an instance of a ``CacheModel``.""" data = json.loads(string) for fieldname, value in data.items(): data[fieldname] = getattr(cls, fieldname).cache_to_python(value) return cls(**data)
python
def deserialize(cls, string): """Reconstruct a previously serialized string back into an instance of a ``CacheModel``.""" data = json.loads(string) for fieldname, value in data.items(): data[fieldname] = getattr(cls, fieldname).cache_to_python(value) return cls(**data)
[ "def", "deserialize", "(", "cls", ",", "string", ")", ":", "data", "=", "json", ".", "loads", "(", "string", ")", "for", "fieldname", ",", "value", "in", "data", ".", "items", "(", ")", ":", "data", "[", "fieldname", "]", "=", "getattr", "(", "cls", ",", "fieldname", ")", ".", "cache_to_python", "(", "value", ")", "return", "cls", "(", "*", "*", "data", ")" ]
Reconstruct a previously serialized string back into an instance of a ``CacheModel``.
[ "Reconstruct", "a", "previously", "serialized", "string", "back", "into", "an", "instance", "of", "a", "CacheModel", "." ]
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L120-L126
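serialize and deserialize above round-trip field values through per-field python_to_cache/cache_to_python hooks and a JSON payload (note serialize's unused keys local in the record above). A minimal standalone sketch of the round trip with identity codecs; the names are illustrative only:

# Minimal standalone sketch of the serialize/deserialize round trip above.
import json

def serialize(data, fields):
    return json.dumps({k: fields[k]['python_to_cache'](v) for k, v in data.items()})

def deserialize(string, fields):
    data = json.loads(string)
    return {k: fields[k]['cache_to_python'](v) for k, v in data.items()}

fields = {'name': {'python_to_cache': str, 'cache_to_python': str}}
payload = serialize({'name': 'widget'}, fields)
assert deserialize(payload, fields) == {'name': 'widget'}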
240,299
ironfroggy/django-better-cache
bettercache/objects.py
CacheModel.save
def save(self, expires=None): """Save a copy of the object into the cache.""" if expires is None: expires = self.expires s = self.serialize() key = self._key(self._all_keys()) _cache.set(key, s, expires)
python
def save(self, expires=None): """Save a copy of the object into the cache.""" if expires is None: expires = self.expires s = self.serialize() key = self._key(self._all_keys()) _cache.set(key, s, expires)
[ "def", "save", "(", "self", ",", "expires", "=", "None", ")", ":", "if", "expires", "is", "None", ":", "expires", "=", "self", ".", "expires", "s", "=", "self", ".", "serialize", "(", ")", "key", "=", "self", ".", "_key", "(", "self", ".", "_all_keys", "(", ")", ")", "_cache", ".", "set", "(", "key", ",", "s", ",", "expires", ")" ]
Save a copy of the object into the cache.
[ "Save", "a", "copy", "of", "the", "object", "into", "the", "cache", "." ]
5350e8c646cef1c1ca74eab176f856ddd9eaf5c3
https://github.com/ironfroggy/django-better-cache/blob/5350e8c646cef1c1ca74eab176f856ddd9eaf5c3/bettercache/objects.py#L128-L135