Dataset schema (column: type and value/length range):
id: int32 (0 to 252k)
repo: string (length 7 to 55)
path: string (length 4 to 127)
func_name: string (length 1 to 88)
original_string: string (length 75 to 19.8k)
language: string (1 class)
code: string (length 75 to 19.8k)
code_tokens: list
docstring: string (length 3 to 17.3k)
docstring_tokens: list
sha: string (length 40)
url: string (length 87 to 242)
244,000
etcher-be/emiz
emiz/avwx/translate.py
wind_shear
def wind_shear(shear: str, unit_alt: str = 'ft', unit_wind: str = 'kt', spoken: bool = False) -> str:
    """
    Translate wind shear into a readable string
    Ex: Wind shear 2000ft from 140 at 30kt
    """
    if not shear or 'WS' not in shear or '/' not in shear:
        return ''
    shear = shear[2:].rstrip(unit_wind.upper()).split('/')  # type: ignore
    wdir = core.spoken_number(shear[1][:3]) if spoken else shear[1][:3]
    return f'Wind shear {int(shear[0])*100}{unit_alt} from {wdir} at {shear[1][3:]}{unit_wind}'
python
Translate wind shear into a readable string Ex: Wind shear 2000ft from 140 at 30kt
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L242-L252
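A hypothetical call matching the docstring example above; the core.spoken_number helper is only reached when spoken=True, so this default call does not need it:

print(wind_shear('WS020/14030KT'))  # assumed group format: 'WS' + height in hundreds of ft + '/' + direction + speed + unit
# -> 'Wind shear 2000ft from 140 at 30kt'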
244,001
etcher-be/emiz
emiz/avwx/translate.py
turb_ice
def turb_ice(turbice: [str], unit: str = 'ft') -> str:  # type: ignore
    """
    Translate the list of turbulance or icing into a readable sentence
    Ex: Occasional moderate turbulence in clouds from 3000ft to 14000ft
    """
    if not turbice:
        return ''
    # Determine turbulance or icing
    if turbice[0][0] == '5':
        conditions = TURBULANCE_CONDITIONS
    elif turbice[0][0] == '6':
        conditions = ICING_CONDITIONS
    else:
        return ''
    # Create list of split items (type, floor, height)
    split = []
    for item in turbice:
        if len(item) == 6:
            split.append([item[1:2], item[2:5], item[5]])
    # Combine items that cover a layer greater than 9000ft
    for i in reversed(range(len(split) - 1)):
        if split[i][2] == '9' and split[i][0] == split[i + 1][0] \
                and int(split[i + 1][1]) == (int(split[i][1]) + int(split[i][2]) * 10):
            split[i][2] = str(int(split[i][2]) + int(split[i + 1][2]))
            split.pop(i + 1)
    # Return joined, formatted string from split items
    return ', '.join(['{conditions} from {low_alt}{unit} to {high_alt}{unit}'.format(
        conditions=conditions[item[0]],
        low_alt=int(item[1]) * 100,
        high_alt=int(item[1]) * 100 + int(item[2]) * 1000,
        unit=unit) for item in split])
python
Translate the list of turbulance or icing into a readable sentence Ex: Occasional moderate turbulence in clouds from 3000ft to 14000ft
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L255-L284
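A hedged sketch of the expected input, assuming TURBULANCE_CONDITIONS maps code '4' to 'Occasional moderate turbulence in clouds' (the conventional TAF turbulence code; the actual dict is not shown here). The two stacked layers merge because the first one spans a full 9000ft:

# '540309': type 4, floor 030 (3000ft), depth 9 (9000ft); '541202' continues at 12000ft for another 2000ft
print(turb_ice(['540309', '541202']))
# -> 'Occasional moderate turbulence in clouds from 3000ft to 14000ft' (assuming the mapping above)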
244,002
etcher-be/emiz
emiz/avwx/translate.py
min_max_temp
def min_max_temp(temp: str, unit: str = 'C') -> str:
    """
    Format the Min and Max temp elemets into a readable string
    Ex: Maximum temperature of 23°C (73°F) at 18-15:00Z
    """
    if not temp or len(temp) < 7:
        return ''
    if temp[:2] == 'TX':
        temp_type = 'Maximum'
    elif temp[:2] == 'TN':
        temp_type = 'Minimum'
    else:
        return ''
    temp = temp[2:].replace('M', '-').replace('Z', '').split('/')  # type: ignore
    if len(temp[1]) > 2:
        temp[1] = temp[1][:2] + '-' + temp[1][2:]  # type: ignore
    temp_value = temperature(core.make_number(temp[0]), unit)
    return f'{temp_type} temperature of {temp_value} at {temp[1]}:00Z'
python
Format the Min and Max temp elemets into a readable string Ex: Maximum temperature of 23°C (73°F) at 18-15:00Z
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L287-L305
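A hypothetical call, assuming the module's temperature() and core.make_number() helpers render values as in the docstring example; 'TX23/1815Z' is a TAF maximum-temperature group (23°C on day 18 at 15:00Z):

print(min_max_temp('TX23/1815Z'))
# -> 'Maximum temperature of 23°C (73°F) at 18-15:00Z' (per the docstring example)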
244,003
etcher-be/emiz
emiz/avwx/translate.py
shared
def shared(wxdata: ReportData, units: Units) -> typing.Dict[str, str]:
    """
    Translate Visibility, Altimeter, Clouds, and Other
    """
    translations = {}
    translations['visibility'] = visibility(wxdata.visibility, units.visibility)  # type: ignore
    translations['altimeter'] = altimeter(wxdata.altimeter, units.altimeter)  # type: ignore
    translations['clouds'] = clouds(wxdata.clouds, units.altitude)  # type: ignore
    translations['other'] = other_list(wxdata.other)  # type: ignore
    return translations
python
Translate Visibility, Altimeter, Clouds, and Other
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L308-L317
244,004
etcher-be/emiz
emiz/avwx/translate.py
metar
def metar(wxdata: MetarData, units: Units) -> MetarTrans:
    """
    Translate the results of metar.parse
    Keys: Wind, Visibility, Clouds, Temperature, Dewpoint, Altimeter, Other
    """
    translations = shared(wxdata, units)
    translations['wind'] = wind(wxdata.wind_direction, wxdata.wind_speed,
                                wxdata.wind_gust, wxdata.wind_variable_direction,
                                units.wind_speed)
    translations['temperature'] = temperature(wxdata.temperature, units.temperature)
    translations['dewpoint'] = temperature(wxdata.dewpoint, units.temperature)
    translations['remarks'] = remarks.translate(wxdata.remarks)  # type: ignore
    return MetarTrans(**translations)
python
Translate the results of metar.parse Keys: Wind, Visibility, Clouds, Temperature, Dewpoint, Altimeter, Other
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L320-L333
244,005
etcher-be/emiz
emiz/avwx/translate.py
taf
def taf(wxdata: TafData, units: Units) -> TafTrans:
    """
    Translate the results of taf.parse
    Keys: Forecast, Min-Temp, Max-Temp
    Forecast keys: Wind, Visibility, Clouds, Altimeter, Wind-Shear, Turbulance, Icing, Other
    """
    translations = {'forecast': []}  # type: ignore
    for line in wxdata.forecast:
        trans = shared(line, units)  # type: ignore
        trans['wind'] = wind(line.wind_direction, line.wind_speed,
                             line.wind_gust, unit=units.wind_speed)
        trans['wind_shear'] = wind_shear(line.wind_shear, units.altitude, units.wind_speed)
        trans['turbulance'] = turb_ice(line.turbulance, units.altitude)
        trans['icing'] = turb_ice(line.icing, units.altitude)
        # Remove false 'Sky Clear' if line type is 'BECMG'
        if line.type == 'BECMG' and trans['clouds'] == 'Sky clear':
            trans['clouds'] = None  # type: ignore
        translations['forecast'].append(TafLineTrans(**trans))  # type: ignore
    translations['min_temp'] = min_max_temp(wxdata.min_temp, units.temperature)  # type: ignore
    translations['max_temp'] = min_max_temp(wxdata.max_temp, units.temperature)  # type: ignore
    translations['remarks'] = remarks.translate(wxdata.remarks)
    return TafTrans(**translations)
python
Translate the results of taf.parse Keys: Forecast, Min-Temp, Max-Temp Forecast keys: Wind, Visibility, Clouds, Altimeter, Wind-Shear, Turbulance, Icing, Other
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/translate.py#L336-L359
244,006
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
_get_substitute_element
def _get_substitute_element(head, elt, ps):
    '''if elt matches a member of the head substitutionGroup, return the GED typecode.

    head -- ElementDeclaration typecode,
    elt -- the DOM element being parsed
    ps -- ParsedSoap Instance
    '''
    if not isinstance(head, ElementDeclaration):
        return None
    return ElementDeclaration.getSubstitutionElement(head, elt, ps)
python
if elt matches a member of the head substitutionGroup, return the GED typecode. head -- ElementDeclaration typecode, elt -- the DOM element being parsed ps -- ParsedSoap Instance
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L17-L28
244,007
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
_is_substitute_element
def _is_substitute_element(head, sub):
    '''if head and sub are both GEDs, and sub declares head as its
    substitutionGroup then return True.

    head -- Typecode instance
    sub -- Typecode instance
    '''
    if not isinstance(head, ElementDeclaration) or not isinstance(sub, ElementDeclaration):
        return False
    try:
        group = sub.substitutionGroup
    except (AttributeError, TypeError):
        return False

    ged = GED(*group)
    # TODO: better way of representing element references.  Wrap them with
    # facets, and dereference when needed and delegate to..
    print (head.nspname == ged.nspname and head.pname == ged.pname)
    if head is ged or not (head.nspname == ged.nspname and head.pname == ged.pname):
        return False

    return True
python
if head and sub are both GEDs, and sub declares head as its substitutionGroup then return True. head -- Typecode instance sub -- Typecode instance
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L33-L56
244,008
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
SchemaInstanceType.getElementDeclaration
def getElementDeclaration(cls, namespaceURI, name, isref=False, lazy=False):
    '''Grab an element declaration, returns a typecode instance
    representation or a typecode class definition.  An element reference
    has its own facets, and is local so it will not be cached.

    Parameters:
        namespaceURI --
        name --
        isref -- if element reference, return class definition.
    '''
    key = (namespaceURI, name)
    if isref:
        klass = cls.elements.get(key, None)
        if klass is not None and lazy is True:
            return _Mirage(klass)
        return klass

    typecode = cls.element_typecode_cache.get(key, None)
    if typecode is None:
        tcls = cls.elements.get(key, None)
        if tcls is not None:
            typecode = cls.element_typecode_cache[key] = tcls()
            typecode.typed = False

    return typecode
python
Grab an element declaration, returns a typecode instance representation or a typecode class definition. An element reference has its own facets, and is local so it will not be cached. Parameters: namespaceURI -- name -- isref -- if element reference, return class definition.
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L164-L189
244,009
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
ElementDeclaration.checkSubstitute
def checkSubstitute(self, typecode):
    '''If this is True, allow typecode to be substituted for "self" typecode.
    '''
    if not isinstance(typecode, ElementDeclaration):
        return False

    try:
        nsuri, ncname = typecode.substitutionGroup
    except (AttributeError, TypeError):
        return False

    if (nsuri, ncname) != (self.schema, self.literal):
        # allow slop with the empty namespace
        if not nsuri and not self.schema and ncname == self.literal:
            return True
        return False

    sub = GED(self.schema, self.literal)
    if sub is None or sub is not typecode:
        return False

    return True
python
If this is True, allow typecode to be substituted for "self" typecode.
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L203-L226
244,010
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
ElementDeclaration.getSubstitutionElement
def getSubstitutionElement(self, elt, ps):
    '''if elt matches a member of the head substitutionGroup, return the
    GED typecode representation of the member.

    head -- ElementDeclaration typecode,
    elt -- the DOM element being parsed
    ps -- ParsedSoap instance
    '''
    nsuri, ncname = _get_element_nsuri_name(elt)
    typecode = GED(nsuri, ncname)
    if typecode is None:
        return

    try:
        nsuri, ncname = typecode.substitutionGroup
    except (AttributeError, TypeError):
        return

    if (ncname == self.pname) and (nsuri == self.nspname or
            (not nsuri and not self.nspname)):
        return typecode

    return
python
if elt matches a member of the head substitutionGroup, return the GED typecode representation of the member. head -- ElementDeclaration typecode, elt -- the DOM element being parsed ps -- ParsedSoap instance
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L228-L250
244,011
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
_GetPyobjWrapper.RegisterBuiltin
def RegisterBuiltin(cls, arg):
    '''register a builtin, create a new wrapper.
    '''
    if arg in cls.types_dict:
        raise RuntimeError, '%s already registered' %arg

    class _Wrapper(arg):
        'Wrapper for builtin %s\n%s' %(arg, cls.__doc__)

    _Wrapper.__name__ = '_%sWrapper' %arg.__name__
    cls.types_dict[arg] = _Wrapper
python
register a builtin, create a new wrapper.
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L376-L384
244,012
rameshg87/pyremotevbox
pyremotevbox/ZSI/schema.py
_GetPyobjWrapper.RegisterAnyElement
def RegisterAnyElement(cls):
    '''If find registered TypeCode instance, add Wrapper class to
    TypeCode class serialmap and Re-RegisterType.  Provides Any
    serialzation of any instances of the Wrapper.
    '''
    for k, v in cls.types_dict.items():
        what = Any.serialmap.get(k)
        if what is None:
            continue
        if v in what.__class__.seriallist:
            continue
        what.__class__.seriallist.append(v)
        RegisterType(what.__class__, clobber=1, **what.__dict__)
python
If find registered TypeCode instance, add Wrapper class to TypeCode class serialmap and Re-RegisterType. Provides Any serialzation of any instances of the Wrapper.
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/schema.py#L387-L397
244,013
rmetcalf9/baseapp_for_restapi_backend_with_swagger
baseapp_for_restapi_backend_with_swagger/FlaskRestSubclass.py
FlaskRestSubclass.render_doc
def render_doc(self):
    '''Override this method to customize the documentation page'''
    if self._doc_view:
        return self._doc_view()
    elif not self._doc:
        self.abort(self.bc_HTTPStatus_NOT_FOUND)
    res = render_template('swagger-ui.html', title=self.title, specs_url=self.specs_url)
    res = res.replace(self.complexReplaceString, self.APIDOCSPath)
    regexp = "\"https?:\/\/[a-zA-Z0\-9._]*(:[0-9]*)?" + self.internal_api_prefix.replace("/", "\/") + "\/swagger.json\""
    regexp = "\"https?:\/\/[a-zA-Z0\-9._]*(:[0-9]*)?" + self.internal_apidoc_prefix.replace("/", "\/") + "\/swagger.json\""
    p = re.compile(regexp)
    res = p.sub("\"" + self.apidocsurl + "/swagger.json\"", res)
    '''
    if (self.overrideAPIDOCSPath()):
        #print("About to replace")
        #print(res)
        res = self.reaplcements(res)
        #print("Replaced")
        #print(res)
        #print("End")
    '''
    return res
python
Override this method to customize the documentation page
c7c797cf810929cdb044215b63bdf9e5024f46e4
https://github.com/rmetcalf9/baseapp_for_restapi_backend_with_swagger/blob/c7c797cf810929cdb044215b63bdf9e5024f46e4/baseapp_for_restapi_backend_with_swagger/FlaskRestSubclass.py#L122-L144
244,014
SergeySatskiy/cdm-gc-plugin
cdmplugins/gc/configdlg.py
GCPluginConfigDialog.__createLayout
def __createLayout(self):
    """Creates the dialog layout"""
    self.resize(450, 150)
    self.setSizeGripEnabled(True)

    verticalLayout = QVBoxLayout(self)

    whereGroupbox = QGroupBox(self)
    whereGroupbox.setTitle("Garbage collector message destination")
    sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
    sizePolicy.setHorizontalStretch(0)
    sizePolicy.setVerticalStretch(0)
    sizePolicy.setHeightForWidth(
        whereGroupbox.sizePolicy().hasHeightForWidth())
    whereGroupbox.setSizePolicy(sizePolicy)

    layoutWhere = QVBoxLayout(whereGroupbox)
    self.__silentRButton = QRadioButton(whereGroupbox)
    self.__silentRButton.setText("Silent")
    layoutWhere.addWidget(self.__silentRButton)
    self.__statusbarRButton = QRadioButton(whereGroupbox)
    self.__statusbarRButton.setText("Status bar")
    layoutWhere.addWidget(self.__statusbarRButton)
    self.__logtabRButton = QRadioButton(whereGroupbox)
    self.__logtabRButton.setText("Log tab")
    layoutWhere.addWidget(self.__logtabRButton)

    verticalLayout.addWidget(whereGroupbox)

    buttonBox = QDialogButtonBox(self)
    buttonBox.setOrientation(Qt.Horizontal)
    buttonBox.setStandardButtons(QDialogButtonBox.Ok |
                                 QDialogButtonBox.Cancel)
    self.__OKButton = buttonBox.button(QDialogButtonBox.Ok)
    self.__OKButton.setDefault(True)
    buttonBox.accepted.connect(self.accept)
    buttonBox.rejected.connect(self.close)
    verticalLayout.addWidget(buttonBox)
python
Creates the dialog layout
f6dd59d5dc80d3f8f5ca5bcf49bad86c88be38df
https://github.com/SergeySatskiy/cdm-gc-plugin/blob/f6dd59d5dc80d3f8f5ca5bcf49bad86c88be38df/cdmplugins/gc/configdlg.py#L50-L87
244,015
SergeySatskiy/cdm-gc-plugin
cdmplugins/gc/configdlg.py
GCPluginConfigDialog.getCheckedOption
def getCheckedOption(self):
    """Returns what destination is selected"""
    if self.__silentRButton.isChecked():
        return GCPluginConfigDialog.SILENT
    if self.__statusbarRButton.isChecked():
        return GCPluginConfigDialog.STATUS_BAR
    return GCPluginConfigDialog.LOG
python
Returns what destination is selected
f6dd59d5dc80d3f8f5ca5bcf49bad86c88be38df
https://github.com/SergeySatskiy/cdm-gc-plugin/blob/f6dd59d5dc80d3f8f5ca5bcf49bad86c88be38df/cdmplugins/gc/configdlg.py#L89-L95
244,016
carlosp420/dataset-creator
dataset_creator/utils.py
get_seq
def get_seq(seq_record, codon_positions, aminoacids=False, degenerate=None):
    """
    Checks parameters such as codon_positions, aminoacids... to return the
    required sequence as string.

    Parameters:
        seq_record (SeqRecordExpanded object):
        codon_positions (str):
        aminoacids (boolean):

    Returns:
        Namedtuple containing ``seq (str)`` and ``warning (str)``.
    """
    Sequence = namedtuple('Sequence', ['seq', 'warning'])

    if codon_positions not in [None, '1st', '2nd', '3rd', '1st-2nd', 'ALL']:
        raise WrongParameterFormat("`codon_positions` argument should be any of the following"
                                   ": 1st, 2nd, 3rd, 1st-2nd or ALL")

    if aminoacids:
        aa = seq_record.translate()
        if '*' in aa:
            warning = "Gene {0}, sequence {1} contains stop codons '*'".format(
                seq_record.gene_code, seq_record.voucher_code)
        else:
            warning = None
        return Sequence(seq=aa, warning=warning)

    if degenerate:
        return Sequence(seq=seq_record.degenerate(degenerate), warning=None)

    if codon_positions == '1st':
        return Sequence(seq=seq_record.first_codon_position(), warning=None)
    elif codon_positions == '2nd':
        return Sequence(seq=seq_record.second_codon_position(), warning=None)
    elif codon_positions == '3rd':
        return Sequence(seq=seq_record.third_codon_position(), warning=None)
    elif codon_positions == '1st-2nd':
        return Sequence(seq=seq_record.first_and_second_codon_positions(), warning=None)
    else:  # None and ALL
        return Sequence(seq=str(seq_record.seq), warning=None)
python
Checks parameters such as codon_positions, aminoacids... to return the required sequence as string. Parameters: seq_record (SeqRecordExpanded object): codon_positions (str): aminoacids (boolean): Returns: Namedtuple containing ``seq (str)`` and ``warning (str)``.
ea27340b145cb566a36c1836ff42263f1b2003a0
https://github.com/carlosp420/dataset-creator/blob/ea27340b145cb566a36c1836ff42263f1b2003a0/dataset_creator/utils.py#L17-L56
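A hypothetical usage sketch, assuming rec is a SeqRecordExpanded-style object that provides the first_and_second_codon_positions() and related methods called above:

result = get_seq(rec, codon_positions='1st-2nd')
print(result.seq)      # sequence restricted to 1st and 2nd codon positions
print(result.warning)  # always None here; warnings are only produced when aminoacids=True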
244,017
carlosp420/dataset-creator
dataset_creator/utils.py
convert_nexus_to_format
def convert_nexus_to_format(dataset_as_nexus, dataset_format):
    """
    Converts nexus format to Phylip and Fasta using Biopython tools.

    :param dataset_as_nexus:
    :param dataset_format:
    :return:
    """
    fake_handle = StringIO(dataset_as_nexus)
    nexus_al = AlignIO.parse(fake_handle, 'nexus')
    tmp_file = make_random_filename()
    AlignIO.write(nexus_al, tmp_file, dataset_format)
    dataset_as_fasta = read_and_delete_tmp_file(tmp_file)
    return dataset_as_fasta
python
Converts nexus format to Phylip and Fasta using Biopython tools. :param dataset_as_nexus: :param dataset_format: :return:
ea27340b145cb566a36c1836ff42263f1b2003a0
https://github.com/carlosp420/dataset-creator/blob/ea27340b145cb566a36c1836ff42263f1b2003a0/dataset_creator/utils.py#L59-L72
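A hypothetical call, assuming Biopython is installed and the make_random_filename and read_and_delete_tmp_file helpers from the same module are available; 'alignment.nex' is a placeholder input file:

with open('alignment.nex') as handle:
    nexus_text = handle.read()
fasta_text = convert_nexus_to_format(nexus_text, 'fasta')
print(fasta_text.splitlines()[0])  # first FASTA header line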
244,018
tonyseek/html5lib-truncation
html5lib_truncation/utils.py
truncate_sentence
def truncate_sentence(text, max_chars, break_words=False, padding=0):
    """Truncates a sentence.

    :param max_chars: The maximum characters of truncated sentence.
    :param break_words: If you wish to truncate given sentence strictly even
                        if it breaks a word, set it to ``True``. It defaults
                        to ``False`` which means truncating given sentence
                        shorter but never breaking words.
    :param padding: The padding size for truncating. It is usually used to
                    keep spaces for some ending characters such as ``"..."``.
    :return: The truncated sentence.
    """
    if break_words:
        return text[:-abs(max_chars - len(text)) - padding]

    words = []
    for word in text.split():
        predicted_len = (
            sum(map(len, words)) +  # length of words
            len(word) +             # length of next word
            len(words) - 1 +        # length of spaces
            padding)
        if predicted_len >= max_chars:
            break
        words.append(word)

    return ' '.join(words)
python
Truncates a sentence. :param max_chars: The maximum characters of truncated sentence. :param break_words: If you wish to truncate given sentence strictly even if it breaks a word, set it to ``True``. It defaults to ``False`` which means truncating given sentence shorter but never breaking words. :param padding: The padding size for truncating. It is usually used to keep spaces for some ending characters such as ``"..."``. :return: The truncated sentence.
b5551e345e583d04dbdf6b97dc2a43a266eec8d6
https://github.com/tonyseek/html5lib-truncation/blob/b5551e345e583d04dbdf6b97dc2a43a266eec8d6/html5lib_truncation/utils.py#L4-L29
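The function has no external dependencies, so a minimal sketch of the default word-preserving mode runs as-is:

print(truncate_sentence('hello world foo bar', max_chars=12))
# -> 'hello world' (stops before adding a word would reach 12 characters)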
244,019
quasipedia/swaggery
examples/async/async.py
AlwaysWin.always_win
def always_win(cls, request) -> [(200, 'Ok', String)]:
    '''Perform an always succeeding task.'''
    task_id = uuid4().hex.upper()[:5]
    log.info('Starting always OK task {}'.format(task_id))
    for i in range(randint(0, MAX_LOOP_DURATION)):
        yield
    log.info('Finished always OK task {}'.format(task_id))
    msg = 'I am finally done with task {}!'.format(task_id)
    Respond(200, msg)
python
Perform an always succeeding task.
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L49-L57
244,020
quasipedia/swaggery
examples/async/async.py
AlwaysFail.always_fail
def always_fail(cls, request) -> [
        (200, 'Ok', String),
        (406, 'Not Acceptable', Void)]:
    '''Perform an always failing task.'''
    task_id = uuid4().hex.upper()[:5]
    log.info('Starting always FAILING task {}'.format(task_id))
    for i in range(randint(0, MAX_LOOP_DURATION)):
        yield
    Respond(406)
    Respond(200, 'Foobar')
python
Perform an always failing task.
[ "Perform", "an", "always", "failing", "task", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L68-L77
244,021
quasipedia/swaggery
examples/async/async.py
Fibonacci.fibonacci
def fibonacci(cls, request, limit: (Ptypes.path, Integer('Upper limit of the series'))) -> [ (200, 'Ok', FibonacciFragment)]: '''Return Fibonacci sequence whose last number is <= limit.''' def fibonacci_generator(): last_two = (0, 1) while last_two[1] <= limit: log.debug('Fibonacci number generated: {}'.format(last_two[1])) yield last_two[1] last_two = last_two[1], sum(last_two) log.info('Starting Fibonacci generation, max: {}'.format(limit)) limit = int(limit) Respond(200, fibonacci_generator())
python
def fibonacci(cls, request, limit: (Ptypes.path, Integer('Upper limit of the series'))) -> [ (200, 'Ok', FibonacciFragment)]: '''Return Fibonacci sequence whose last number is <= limit.''' def fibonacci_generator(): last_two = (0, 1) while last_two[1] <= limit: log.debug('Fibonacci number generated: {}'.format(last_two[1])) yield last_two[1] last_two = last_two[1], sum(last_two) log.info('Starting Fibonacci generation, max: {}'.format(limit)) limit = int(limit) Respond(200, fibonacci_generator())
[ "def", "fibonacci", "(", "cls", ",", "request", ",", "limit", ":", "(", "Ptypes", ".", "path", ",", "Integer", "(", "'Upper limit of the series'", ")", ")", ")", "->", "[", "(", "200", ",", "'Ok'", ",", "FibonacciFragment", ")", "]", ":", "def", "fibonacci_generator", "(", ")", ":", "last_two", "=", "(", "0", ",", "1", ")", "while", "last_two", "[", "1", "]", "<=", "limit", ":", "log", ".", "debug", "(", "'Fibonacci number generated: {}'", ".", "format", "(", "last_two", "[", "1", "]", ")", ")", "yield", "last_two", "[", "1", "]", "last_two", "=", "last_two", "[", "1", "]", ",", "sum", "(", "last_two", ")", "log", ".", "info", "(", "'Starting Fibonacci generation, max: {}'", ".", "format", "(", "limit", ")", ")", "limit", "=", "int", "(", "limit", ")", "Respond", "(", "200", ",", "fibonacci_generator", "(", ")", ")" ]
Return Fibonacci sequence whose last number is <= limit.
[ "Return", "Fibonacci", "sequence", "whose", "last", "number", "is", "<", "=", "limit", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L106-L119
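The fibonacci handler streams its results through a nested generator; stripped of the framework, the same generator (a standalone sketch) is:

    def fibonacci_up_to(limit):
        # yield Fibonacci numbers whose value is <= limit
        last_two = (0, 1)
        while last_two[1] <= limit:
            yield last_two[1]
            last_two = last_two[1], sum(last_two)

    print(list(fibonacci_up_to(20)))  # [1, 1, 2, 3, 5, 8, 13]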
244,022
quasipedia/swaggery
examples/async/async.py
QueryEcho.query_echo
def query_echo(cls, request, foo: (Ptypes.query, String('A query parameter'))) -> [ (200, 'Ok', String)]: '''Echo the query parameter.''' log.info('Echoing query param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
python
def query_echo(cls, request, foo: (Ptypes.query, String('A query parameter'))) -> [ (200, 'Ok', String)]: '''Echo the query parameter.''' log.info('Echoing query param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
[ "def", "query_echo", "(", "cls", ",", "request", ",", "foo", ":", "(", "Ptypes", ".", "query", ",", "String", "(", "'A query parameter'", ")", ")", ")", "->", "[", "(", "200", ",", "'Ok'", ",", "String", ")", "]", ":", "log", ".", "info", "(", "'Echoing query param, value is: {}'", ".", "format", "(", "foo", ")", ")", "for", "i", "in", "range", "(", "randint", "(", "0", ",", "MAX_LOOP_DURATION", ")", ")", ":", "yield", "msg", "=", "'The value sent was: {}'", ".", "format", "(", "foo", ")", "Respond", "(", "200", ",", "msg", ")" ]
Echo the query parameter.
[ "Echo", "the", "query", "parameter", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L130-L138
244,023
quasipedia/swaggery
examples/async/async.py
BodyEcho.body_echo
def body_echo(cls, request, foo: (Ptypes.body, String('A body parameter'))) -> [ (200, 'Ok', String)]: '''Echo the body parameter.''' log.info('Echoing body param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
python
def body_echo(cls, request, foo: (Ptypes.body, String('A body parameter'))) -> [ (200, 'Ok', String)]: '''Echo the body parameter.''' log.info('Echoing body param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
[ "def", "body_echo", "(", "cls", ",", "request", ",", "foo", ":", "(", "Ptypes", ".", "body", ",", "String", "(", "'A body parameter'", ")", ")", ")", "->", "[", "(", "200", ",", "'Ok'", ",", "String", ")", "]", ":", "log", ".", "info", "(", "'Echoing body param, value is: {}'", ".", "format", "(", "foo", ")", ")", "for", "i", "in", "range", "(", "randint", "(", "0", ",", "MAX_LOOP_DURATION", ")", ")", ":", "yield", "msg", "=", "'The value sent was: {}'", ".", "format", "(", "foo", ")", "Respond", "(", "200", ",", "msg", ")" ]
Echo the body parameter.
[ "Echo", "the", "body", "parameter", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L149-L157
244,024
quasipedia/swaggery
examples/async/async.py
HeaderEcho.header_echo
def header_echo(cls, request, api_key: (Ptypes.header, String('API key'))) -> [ (200, 'Ok', String)]: '''Echo the header parameter.''' log.info('Echoing header param, value is: {}'.format(api_key)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(api_key) Respond(200, msg)
python
def header_echo(cls, request, api_key: (Ptypes.header, String('API key'))) -> [ (200, 'Ok', String)]: '''Echo the header parameter.''' log.info('Echoing header param, value is: {}'.format(api_key)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(api_key) Respond(200, msg)
[ "def", "header_echo", "(", "cls", ",", "request", ",", "api_key", ":", "(", "Ptypes", ".", "header", ",", "String", "(", "'API key'", ")", ")", ")", "->", "[", "(", "200", ",", "'Ok'", ",", "String", ")", "]", ":", "log", ".", "info", "(", "'Echoing header param, value is: {}'", ".", "format", "(", "api_key", ")", ")", "for", "i", "in", "range", "(", "randint", "(", "0", ",", "MAX_LOOP_DURATION", ")", ")", ":", "yield", "msg", "=", "'The value sent was: {}'", ".", "format", "(", "api_key", ")", "Respond", "(", "200", ",", "msg", ")" ]
Echo the header parameter.
[ "Echo", "the", "header", "parameter", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L168-L176
244,025
quasipedia/swaggery
examples/async/async.py
FormEcho.form_echo
def form_echo(cls, request, foo: (Ptypes.form, String('A form parameter'))) -> [ (200, 'Ok', String)]: '''Echo the form parameter.''' log.info('Echoing form param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
python
def form_echo(cls, request, foo: (Ptypes.form, String('A form parameter'))) -> [ (200, 'Ok', String)]: '''Echo the form parameter.''' log.info('Echoing form param, value is: {}'.format(foo)) for i in range(randint(0, MAX_LOOP_DURATION)): yield msg = 'The value sent was: {}'.format(foo) Respond(200, msg)
[ "def", "form_echo", "(", "cls", ",", "request", ",", "foo", ":", "(", "Ptypes", ".", "form", ",", "String", "(", "'A form parameter'", ")", ")", ")", "->", "[", "(", "200", ",", "'Ok'", ",", "String", ")", "]", ":", "log", ".", "info", "(", "'Echoing form param, value is: {}'", ".", "format", "(", "foo", ")", ")", "for", "i", "in", "range", "(", "randint", "(", "0", ",", "MAX_LOOP_DURATION", ")", ")", ":", "yield", "msg", "=", "'The value sent was: {}'", ".", "format", "(", "foo", ")", "Respond", "(", "200", ",", "msg", ")" ]
Echo the form parameter.
[ "Echo", "the", "form", "parameter", "." ]
89a2e1b2bebbc511c781c9e63972f65aef73cc2f
https://github.com/quasipedia/swaggery/blob/89a2e1b2bebbc511c781c9e63972f65aef73cc2f/examples/async/async.py#L187-L195
244,026
christophercrouzet/nani
nani.py
resolve
def resolve(data_type, name=None, listify_default=False): """Retrieve the properties for a given data type. This is the main routine where most of the work is done. It converts Nani's data types into properties that can be used to define a new NumPy array and to wrap it into a view object. Use :func:`validate` to check if the input data type is well-formed. Parameters ---------- data_type : nani data type Type of the array elements. name : str Name for the view to be generated for the array. listify_default : bool ``True`` to output the default values with lists in place of tuples. This might cause the output to be incompatible with array creation routines such as ``numpy.array`` but it should still work for element assignment. Returns ------- nani.Nani The properties to use to initalize a NumPy array around the data type. Examples -------- Create a NumPy array where each element represents a color: >>> import numpy >>> import nani >>> color_type = nani.Array( ... element_type=nani.Number(type=numpy.uint8, default=255), ... shape=3, ... view=None) >>> dtype, default, view = nani.resolve(color_type, name='Color') >>> a = numpy.array([default] * element_count, dtype=dtype) >>> v = view(a) >>> type(v) <class 'nani.Color'> >>> for color in v: ... color [255, 255, 255] [255, 255, 255] """ data_type = _consolidate(data_type) return Nani( dtype=numpy.dtype(_resolve_dtype(data_type)), default=_resolve_default(data_type, listify=listify_default), view=_resolve_view(Array(element_type=data_type, shape=-1, name=name)))
python
def resolve(data_type, name=None, listify_default=False): """Retrieve the properties for a given data type. This is the main routine where most of the work is done. It converts Nani's data types into properties that can be used to define a new NumPy array and to wrap it into a view object. Use :func:`validate` to check if the input data type is well-formed. Parameters ---------- data_type : nani data type Type of the array elements. name : str Name for the view to be generated for the array. listify_default : bool ``True`` to output the default values with lists in place of tuples. This might cause the output to be incompatible with array creation routines such as ``numpy.array`` but it should still work for element assignment. Returns ------- nani.Nani The properties to use to initalize a NumPy array around the data type. Examples -------- Create a NumPy array where each element represents a color: >>> import numpy >>> import nani >>> color_type = nani.Array( ... element_type=nani.Number(type=numpy.uint8, default=255), ... shape=3, ... view=None) >>> dtype, default, view = nani.resolve(color_type, name='Color') >>> a = numpy.array([default] * element_count, dtype=dtype) >>> v = view(a) >>> type(v) <class 'nani.Color'> >>> for color in v: ... color [255, 255, 255] [255, 255, 255] """ data_type = _consolidate(data_type) return Nani( dtype=numpy.dtype(_resolve_dtype(data_type)), default=_resolve_default(data_type, listify=listify_default), view=_resolve_view(Array(element_type=data_type, shape=-1, name=name)))
[ "def", "resolve", "(", "data_type", ",", "name", "=", "None", ",", "listify_default", "=", "False", ")", ":", "data_type", "=", "_consolidate", "(", "data_type", ")", "return", "Nani", "(", "dtype", "=", "numpy", ".", "dtype", "(", "_resolve_dtype", "(", "data_type", ")", ")", ",", "default", "=", "_resolve_default", "(", "data_type", ",", "listify", "=", "listify_default", ")", ",", "view", "=", "_resolve_view", "(", "Array", "(", "element_type", "=", "data_type", ",", "shape", "=", "-", "1", ",", "name", "=", "name", ")", ")", ")" ]
Retrieve the properties for a given data type. This is the main routine where most of the work is done. It converts Nani's data types into properties that can be used to define a new NumPy array and to wrap it into a view object. Use :func:`validate` to check if the input data type is well-formed. Parameters ---------- data_type : nani data type Type of the array elements. name : str Name for the view to be generated for the array. listify_default : bool ``True`` to output the default values with lists in place of tuples. This might cause the output to be incompatible with array creation routines such as ``numpy.array`` but it should still work for element assignment. Returns ------- nani.Nani The properties to use to initalize a NumPy array around the data type. Examples -------- Create a NumPy array where each element represents a color: >>> import numpy >>> import nani >>> color_type = nani.Array( ... element_type=nani.Number(type=numpy.uint8, default=255), ... shape=3, ... view=None) >>> dtype, default, view = nani.resolve(color_type, name='Color') >>> a = numpy.array([default] * element_count, dtype=dtype) >>> v = view(a) >>> type(v) <class 'nani.Color'> >>> for color in v: ... color [255, 255, 255] [255, 255, 255]
[ "Retrieve", "the", "properties", "for", "a", "given", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L619-L669
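The docstring example in the resolve record leaves element_count undefined; a self-contained variant (assuming the nani package is installed and an array of two colors) would read:

    import numpy
    import nani

    color_type = nani.Array(
        element_type=nani.Number(type=numpy.uint8, default=255),
        shape=3,
        view=None)
    dtype, default, view = nani.resolve(color_type, name='Color')
    a = numpy.array([default] * 2, dtype=dtype)   # element_count assumed to be 2
    v = view(a)
    for color in v:
        print(color)   # prints [255, 255, 255] twice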
244,027
christophercrouzet/nani
nani.py
_consolidate
def _consolidate(data_type): """Enforce the structure of the data type. Specifically, ensure that if a field is defined as a generic tuple, then it will be converted into an instance of `Field`. """ if isinstance(data_type, _ATOMIC): out = data_type elif isinstance(data_type, Array): element_type = _consolidate(data_type.element_type) out = data_type._replace(element_type=element_type) elif isinstance(data_type, Structure): fields = tuple( Field(*(_consolidate(field[i]) if i == _FIELD_TYPE_IDX else field[i] for i in _range(len(field)))) for field in data_type.fields) out = data_type._replace(fields=fields) return out
python
def _consolidate(data_type): """Enforce the structure of the data type. Specifically, ensure that if a field is defined as a generic tuple, then it will be converted into an instance of `Field`. """ if isinstance(data_type, _ATOMIC): out = data_type elif isinstance(data_type, Array): element_type = _consolidate(data_type.element_type) out = data_type._replace(element_type=element_type) elif isinstance(data_type, Structure): fields = tuple( Field(*(_consolidate(field[i]) if i == _FIELD_TYPE_IDX else field[i] for i in _range(len(field)))) for field in data_type.fields) out = data_type._replace(fields=fields) return out
[ "def", "_consolidate", "(", "data_type", ")", ":", "if", "isinstance", "(", "data_type", ",", "_ATOMIC", ")", ":", "out", "=", "data_type", "elif", "isinstance", "(", "data_type", ",", "Array", ")", ":", "element_type", "=", "_consolidate", "(", "data_type", ".", "element_type", ")", "out", "=", "data_type", ".", "_replace", "(", "element_type", "=", "element_type", ")", "elif", "isinstance", "(", "data_type", ",", "Structure", ")", ":", "fields", "=", "tuple", "(", "Field", "(", "*", "(", "_consolidate", "(", "field", "[", "i", "]", ")", "if", "i", "==", "_FIELD_TYPE_IDX", "else", "field", "[", "i", "]", "for", "i", "in", "_range", "(", "len", "(", "field", ")", ")", ")", ")", "for", "field", "in", "data_type", ".", "fields", ")", "out", "=", "data_type", ".", "_replace", "(", "fields", "=", "fields", ")", "return", "out" ]
Enforce the structure of the data type. Specifically, ensure that if a field is defined as a generic tuple, then it will be converted into an instance of `Field`.
[ "Enforce", "the", "structure", "of", "the", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L888-L907
244,028
christophercrouzet/nani
nani.py
_resolve_dtype
def _resolve_dtype(data_type): """Retrieve the corresponding NumPy's `dtype` for a given data type.""" if isinstance(data_type, _FIXED_ATOMIC): out = _get_atomic_dtype(data_type) elif isinstance(data_type, _FLEXIBLE_ATOMIC): out = (_get_atomic_dtype(data_type), data_type.length) elif isinstance(data_type, Array): shape = data_type.shape if isinstance(shape, _SEQUENCE_TYPES) and len(shape) == 1: # Workaround the exception `ValueError: invalid itemsize in # generic type tuple` when an `Array` of shape 0 or (0,) is nested # within another `Array`. shape = shape[0] out = (_resolve_dtype(data_type.element_type), shape) elif isinstance(data_type, Structure): out = [(field.name, _resolve_dtype(field.type)) for field in data_type.fields] return out
python
def _resolve_dtype(data_type): """Retrieve the corresponding NumPy's `dtype` for a given data type.""" if isinstance(data_type, _FIXED_ATOMIC): out = _get_atomic_dtype(data_type) elif isinstance(data_type, _FLEXIBLE_ATOMIC): out = (_get_atomic_dtype(data_type), data_type.length) elif isinstance(data_type, Array): shape = data_type.shape if isinstance(shape, _SEQUENCE_TYPES) and len(shape) == 1: # Workaround the exception `ValueError: invalid itemsize in # generic type tuple` when an `Array` of shape 0 or (0,) is nested # within another `Array`. shape = shape[0] out = (_resolve_dtype(data_type.element_type), shape) elif isinstance(data_type, Structure): out = [(field.name, _resolve_dtype(field.type)) for field in data_type.fields] return out
[ "def", "_resolve_dtype", "(", "data_type", ")", ":", "if", "isinstance", "(", "data_type", ",", "_FIXED_ATOMIC", ")", ":", "out", "=", "_get_atomic_dtype", "(", "data_type", ")", "elif", "isinstance", "(", "data_type", ",", "_FLEXIBLE_ATOMIC", ")", ":", "out", "=", "(", "_get_atomic_dtype", "(", "data_type", ")", ",", "data_type", ".", "length", ")", "elif", "isinstance", "(", "data_type", ",", "Array", ")", ":", "shape", "=", "data_type", ".", "shape", "if", "isinstance", "(", "shape", ",", "_SEQUENCE_TYPES", ")", "and", "len", "(", "shape", ")", "==", "1", ":", "# Workaround the exception `ValueError: invalid itemsize in", "# generic type tuple` when an `Array` of shape 0 or (0,) is nested", "# within another `Array`.", "shape", "=", "shape", "[", "0", "]", "out", "=", "(", "_resolve_dtype", "(", "data_type", ".", "element_type", ")", ",", "shape", ")", "elif", "isinstance", "(", "data_type", ",", "Structure", ")", ":", "out", "=", "[", "(", "field", ".", "name", ",", "_resolve_dtype", "(", "field", ".", "type", ")", ")", "for", "field", "in", "data_type", ".", "fields", "]", "return", "out" ]
Retrieve the corresponding NumPy's `dtype` for a given data type.
[ "Retrieve", "the", "corresponding", "NumPy", "s", "dtype", "for", "a", "given", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L910-L929
244,029
christophercrouzet/nani
nani.py
_resolve_default
def _resolve_default(data_type, listify=False): """Retrieve the default value for a given data type.""" if isinstance(data_type, _ATOMIC): # A Python's object type needs to be left as is instead of being # wrapped into a NumPy type. out = (data_type.default if isinstance(data_type, Object) else _get_atomic_dtype(data_type)(data_type.default)) elif isinstance(data_type, Array): element_default = _resolve_default(data_type.element_type, listify=listify) Sequence = list if listify else tuple shape = ((data_type.shape,) if isinstance(data_type.shape, int) else data_type.shape) out = element_default for dimension in shape: out = Sequence(copy.deepcopy(out) for _ in _range(dimension)) elif isinstance(data_type, Structure): if listify: out = [_resolve_default(field.type, listify=listify) for field in data_type.fields] else: field_defaults = collections.OrderedDict( (field.name, _resolve_default(field.type, listify=listify)) for field in data_type.fields) name = ('StructureDefault_%s' % (data_type.name,) if data_type.name else 'StructureDefault') struct = collections.namedtuple(name, field_defaults.keys()) out = struct(**field_defaults) return out
python
def _resolve_default(data_type, listify=False): """Retrieve the default value for a given data type.""" if isinstance(data_type, _ATOMIC): # A Python's object type needs to be left as is instead of being # wrapped into a NumPy type. out = (data_type.default if isinstance(data_type, Object) else _get_atomic_dtype(data_type)(data_type.default)) elif isinstance(data_type, Array): element_default = _resolve_default(data_type.element_type, listify=listify) Sequence = list if listify else tuple shape = ((data_type.shape,) if isinstance(data_type.shape, int) else data_type.shape) out = element_default for dimension in shape: out = Sequence(copy.deepcopy(out) for _ in _range(dimension)) elif isinstance(data_type, Structure): if listify: out = [_resolve_default(field.type, listify=listify) for field in data_type.fields] else: field_defaults = collections.OrderedDict( (field.name, _resolve_default(field.type, listify=listify)) for field in data_type.fields) name = ('StructureDefault_%s' % (data_type.name,) if data_type.name else 'StructureDefault') struct = collections.namedtuple(name, field_defaults.keys()) out = struct(**field_defaults) return out
[ "def", "_resolve_default", "(", "data_type", ",", "listify", "=", "False", ")", ":", "if", "isinstance", "(", "data_type", ",", "_ATOMIC", ")", ":", "# A Python's object type needs to be left as is instead of being", "# wrapped into a NumPy type.", "out", "=", "(", "data_type", ".", "default", "if", "isinstance", "(", "data_type", ",", "Object", ")", "else", "_get_atomic_dtype", "(", "data_type", ")", "(", "data_type", ".", "default", ")", ")", "elif", "isinstance", "(", "data_type", ",", "Array", ")", ":", "element_default", "=", "_resolve_default", "(", "data_type", ".", "element_type", ",", "listify", "=", "listify", ")", "Sequence", "=", "list", "if", "listify", "else", "tuple", "shape", "=", "(", "(", "data_type", ".", "shape", ",", ")", "if", "isinstance", "(", "data_type", ".", "shape", ",", "int", ")", "else", "data_type", ".", "shape", ")", "out", "=", "element_default", "for", "dimension", "in", "shape", ":", "out", "=", "Sequence", "(", "copy", ".", "deepcopy", "(", "out", ")", "for", "_", "in", "_range", "(", "dimension", ")", ")", "elif", "isinstance", "(", "data_type", ",", "Structure", ")", ":", "if", "listify", ":", "out", "=", "[", "_resolve_default", "(", "field", ".", "type", ",", "listify", "=", "listify", ")", "for", "field", "in", "data_type", ".", "fields", "]", "else", ":", "field_defaults", "=", "collections", ".", "OrderedDict", "(", "(", "field", ".", "name", ",", "_resolve_default", "(", "field", ".", "type", ",", "listify", "=", "listify", ")", ")", "for", "field", "in", "data_type", ".", "fields", ")", "name", "=", "(", "'StructureDefault_%s'", "%", "(", "data_type", ".", "name", ",", ")", "if", "data_type", ".", "name", "else", "'StructureDefault'", ")", "struct", "=", "collections", ".", "namedtuple", "(", "name", ",", "field_defaults", ".", "keys", "(", ")", ")", "out", "=", "struct", "(", "*", "*", "field_defaults", ")", "return", "out" ]
Retrieve the default value for a given data type.
[ "Retrieve", "the", "default", "value", "for", "a", "given", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L932-L961
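The Array branch of _resolve_default wraps the element default once per shape entry, with the first entry ending up innermost; the nesting loop in isolation (pure Python, assuming an element default of 0 and a shape of (2, 3)):

    import copy

    element_default = 0
    shape = (2, 3)
    out = element_default
    for dimension in shape:
        # each pass wraps the previous result in a tuple of length `dimension`
        out = tuple(copy.deepcopy(out) for _ in range(dimension))
    print(out)  # ((0, 0), (0, 0), (0, 0))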
244,030
christophercrouzet/nani
nani.py
_resolve_view
def _resolve_view(data_type): """Retrieve the view for a given data type. Only one view class is returned, that is the one representing the root data type, but more class objects might be dynamically created if the input data type has nested elements, such as for the `Array` and `Structure` types. The default behaviour of dynamically and recursively creating a new view class can be overriden by setting the `view` attribute of a data type. """ view = getattr(data_type, 'view', None) if view is not None: return view if isinstance(data_type, _ATOMIC): out = None elif isinstance(data_type, Array): out = _define_array_view(data_type) elif isinstance(data_type, Structure): out = _define_structure_view(data_type) return out
python
def _resolve_view(data_type): """Retrieve the view for a given data type. Only one view class is returned, that is the one representing the root data type, but more class objects might be dynamically created if the input data type has nested elements, such as for the `Array` and `Structure` types. The default behaviour of dynamically and recursively creating a new view class can be overriden by setting the `view` attribute of a data type. """ view = getattr(data_type, 'view', None) if view is not None: return view if isinstance(data_type, _ATOMIC): out = None elif isinstance(data_type, Array): out = _define_array_view(data_type) elif isinstance(data_type, Structure): out = _define_structure_view(data_type) return out
[ "def", "_resolve_view", "(", "data_type", ")", ":", "view", "=", "getattr", "(", "data_type", ",", "'view'", ",", "None", ")", "if", "view", "is", "not", "None", ":", "return", "view", "if", "isinstance", "(", "data_type", ",", "_ATOMIC", ")", ":", "out", "=", "None", "elif", "isinstance", "(", "data_type", ",", "Array", ")", ":", "out", "=", "_define_array_view", "(", "data_type", ")", "elif", "isinstance", "(", "data_type", ",", "Structure", ")", ":", "out", "=", "_define_structure_view", "(", "data_type", ")", "return", "out" ]
Retrieve the view for a given data type. Only one view class is returned, that is the one representing the root data type, but more class objects might be dynamically created if the input data type has nested elements, such as for the `Array` and `Structure` types. The default behaviour of dynamically and recursively creating a new view class can be overriden by setting the `view` attribute of a data type.
[ "Retrieve", "the", "view", "for", "a", "given", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L964-L986
244,031
christophercrouzet/nani
nani.py
_define_array_view
def _define_array_view(data_type): """Define a new view object for a `Array` type.""" element_type = data_type.element_type element_view = _resolve_view(element_type) if element_view is None: mixins = (_DirectArrayViewMixin,) attributes = _get_mixin_attributes(mixins) elif isinstance(element_type, _ATOMIC): mixins = (_IndirectAtomicArrayViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_element_view': element_view, }) else: mixins = (_IndirectCompositeArrayViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_element_view': element_view, }) name = data_type.name if data_type.name else 'ArrayView' return type(name, (), attributes)
python
def _define_array_view(data_type): """Define a new view object for a `Array` type.""" element_type = data_type.element_type element_view = _resolve_view(element_type) if element_view is None: mixins = (_DirectArrayViewMixin,) attributes = _get_mixin_attributes(mixins) elif isinstance(element_type, _ATOMIC): mixins = (_IndirectAtomicArrayViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_element_view': element_view, }) else: mixins = (_IndirectCompositeArrayViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_element_view': element_view, }) name = data_type.name if data_type.name else 'ArrayView' return type(name, (), attributes)
[ "def", "_define_array_view", "(", "data_type", ")", ":", "element_type", "=", "data_type", ".", "element_type", "element_view", "=", "_resolve_view", "(", "element_type", ")", "if", "element_view", "is", "None", ":", "mixins", "=", "(", "_DirectArrayViewMixin", ",", ")", "attributes", "=", "_get_mixin_attributes", "(", "mixins", ")", "elif", "isinstance", "(", "element_type", ",", "_ATOMIC", ")", ":", "mixins", "=", "(", "_IndirectAtomicArrayViewMixin", ",", ")", "attributes", "=", "_get_mixin_attributes", "(", "mixins", ")", "attributes", ".", "update", "(", "{", "'_element_view'", ":", "element_view", ",", "}", ")", "else", ":", "mixins", "=", "(", "_IndirectCompositeArrayViewMixin", ",", ")", "attributes", "=", "_get_mixin_attributes", "(", "mixins", ")", "attributes", ".", "update", "(", "{", "'_element_view'", ":", "element_view", ",", "}", ")", "name", "=", "data_type", ".", "name", "if", "data_type", ".", "name", "else", "'ArrayView'", "return", "type", "(", "name", ",", "(", ")", ",", "attributes", ")" ]
Define a new view object for a `Array` type.
[ "Define", "a", "new", "view", "object", "for", "a", "Array", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L989-L1010
244,032
christophercrouzet/nani
nani.py
_define_structure_view
def _define_structure_view(data_type): """Define a new view object for a `Structure` type.""" def define_getter(field_index, field_type, field_view): if field_view is None: def getter(self): return self._data[field_index] elif isinstance(field_type, _ATOMIC): def getter(self): return field_view(self._data, field_index) else: def getter(self): return field_view(self._data[field_index]) return getter def define_setter(field_index, read_only): def setter(self, value): self._data[field_index] = value return None if read_only else setter field_views = [_resolve_view(field.type) for field in data_type.fields] mixins = (_StructuredViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_fields': tuple(field.name for field in data_type.fields), }) attributes.update({ field.name: property( fget=define_getter(i, field.type, field_view), fset=define_setter(i, field.read_only), fdel=None) for i, (field, field_view) in enumerate(zip(data_type.fields, field_views))}) name = data_type.name if data_type.name else 'StructureView' return type(name, (), attributes)
python
def _define_structure_view(data_type): """Define a new view object for a `Structure` type.""" def define_getter(field_index, field_type, field_view): if field_view is None: def getter(self): return self._data[field_index] elif isinstance(field_type, _ATOMIC): def getter(self): return field_view(self._data, field_index) else: def getter(self): return field_view(self._data[field_index]) return getter def define_setter(field_index, read_only): def setter(self, value): self._data[field_index] = value return None if read_only else setter field_views = [_resolve_view(field.type) for field in data_type.fields] mixins = (_StructuredViewMixin,) attributes = _get_mixin_attributes(mixins) attributes.update({ '_fields': tuple(field.name for field in data_type.fields), }) attributes.update({ field.name: property( fget=define_getter(i, field.type, field_view), fset=define_setter(i, field.read_only), fdel=None) for i, (field, field_view) in enumerate(zip(data_type.fields, field_views))}) name = data_type.name if data_type.name else 'StructureView' return type(name, (), attributes)
[ "def", "_define_structure_view", "(", "data_type", ")", ":", "def", "define_getter", "(", "field_index", ",", "field_type", ",", "field_view", ")", ":", "if", "field_view", "is", "None", ":", "def", "getter", "(", "self", ")", ":", "return", "self", ".", "_data", "[", "field_index", "]", "elif", "isinstance", "(", "field_type", ",", "_ATOMIC", ")", ":", "def", "getter", "(", "self", ")", ":", "return", "field_view", "(", "self", ".", "_data", ",", "field_index", ")", "else", ":", "def", "getter", "(", "self", ")", ":", "return", "field_view", "(", "self", ".", "_data", "[", "field_index", "]", ")", "return", "getter", "def", "define_setter", "(", "field_index", ",", "read_only", ")", ":", "def", "setter", "(", "self", ",", "value", ")", ":", "self", ".", "_data", "[", "field_index", "]", "=", "value", "return", "None", "if", "read_only", "else", "setter", "field_views", "=", "[", "_resolve_view", "(", "field", ".", "type", ")", "for", "field", "in", "data_type", ".", "fields", "]", "mixins", "=", "(", "_StructuredViewMixin", ",", ")", "attributes", "=", "_get_mixin_attributes", "(", "mixins", ")", "attributes", ".", "update", "(", "{", "'_fields'", ":", "tuple", "(", "field", ".", "name", "for", "field", "in", "data_type", ".", "fields", ")", ",", "}", ")", "attributes", ".", "update", "(", "{", "field", ".", "name", ":", "property", "(", "fget", "=", "define_getter", "(", "i", ",", "field", ".", "type", ",", "field_view", ")", ",", "fset", "=", "define_setter", "(", "i", ",", "field", ".", "read_only", ")", ",", "fdel", "=", "None", ")", "for", "i", ",", "(", "field", ",", "field_view", ")", "in", "enumerate", "(", "zip", "(", "data_type", ".", "fields", ",", "field_views", ")", ")", "}", ")", "name", "=", "data_type", ".", "name", "if", "data_type", ".", "name", "else", "'StructureView'", "return", "type", "(", "name", ",", "(", ")", ",", "attributes", ")" ]
Define a new view object for a `Structure` type.
[ "Define", "a", "new", "view", "object", "for", "a", "Structure", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1013-L1048
244,033
christophercrouzet/nani
nani.py
_get_mixin_attributes
def _get_mixin_attributes(mixins): """Retrieve the attributes for a given set of mixin classes. The attributes of each mixin class are being merged into a single dictionary. """ return {attribute: mixin.__dict__[attribute] for mixin in mixins for attribute in _MIXIN_ATTRIBUTES[mixin]}
python
def _get_mixin_attributes(mixins): """Retrieve the attributes for a given set of mixin classes. The attributes of each mixin class are being merged into a single dictionary. """ return {attribute: mixin.__dict__[attribute] for mixin in mixins for attribute in _MIXIN_ATTRIBUTES[mixin]}
[ "def", "_get_mixin_attributes", "(", "mixins", ")", ":", "return", "{", "attribute", ":", "mixin", ".", "__dict__", "[", "attribute", "]", "for", "mixin", "in", "mixins", "for", "attribute", "in", "_MIXIN_ATTRIBUTES", "[", "mixin", "]", "}" ]
Retrieve the attributes for a given set of mixin classes. The attributes of each mixin class are being merged into a single dictionary.
[ "Retrieve", "the", "attributes", "for", "a", "given", "set", "of", "mixin", "classes", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1051-L1059
244,034
christophercrouzet/nani
nani.py
_get_atomic_dtype
def _get_atomic_dtype(data_type): """Retrieve the NumPy's `dtype` for a given atomic data type.""" atomic_type = getattr(data_type, 'type', None) if atomic_type is not None: return atomic_type return _PREDEFINED_ATOMIC_NUMPY_TYPES[_find_base_type(data_type)]
python
def _get_atomic_dtype(data_type): """Retrieve the NumPy's `dtype` for a given atomic data type.""" atomic_type = getattr(data_type, 'type', None) if atomic_type is not None: return atomic_type return _PREDEFINED_ATOMIC_NUMPY_TYPES[_find_base_type(data_type)]
[ "def", "_get_atomic_dtype", "(", "data_type", ")", ":", "atomic_type", "=", "getattr", "(", "data_type", ",", "'type'", ",", "None", ")", "if", "atomic_type", "is", "not", "None", ":", "return", "atomic_type", "return", "_PREDEFINED_ATOMIC_NUMPY_TYPES", "[", "_find_base_type", "(", "data_type", ")", "]" ]
Retrieve the NumPy's `dtype` for a given atomic data type.
[ "Retrieve", "the", "NumPy", "s", "dtype", "for", "a", "given", "atomic", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1062-L1068
244,035
christophercrouzet/nani
nani.py
_find_base_type
def _find_base_type(data_type): """Find the Nani's base type for a given data type. This is useful when Nani's data types were subclassed and the original type is required. """ bases = type(data_type).__mro__ for base in bases: if base in _ALL: return base return None
python
def _find_base_type(data_type): """Find the Nani's base type for a given data type. This is useful when Nani's data types were subclassed and the original type is required. """ bases = type(data_type).__mro__ for base in bases: if base in _ALL: return base return None
[ "def", "_find_base_type", "(", "data_type", ")", ":", "bases", "=", "type", "(", "data_type", ")", ".", "__mro__", "for", "base", "in", "bases", ":", "if", "base", "in", "_ALL", ":", "return", "base", "return", "None" ]
Find the Nani's base type for a given data type. This is useful when Nani's data types were subclassed and the original type is required.
[ "Find", "the", "Nani", "s", "base", "type", "for", "a", "given", "data", "type", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1071-L1082
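_find_base_type simply walks the MRO until it meets a known type; the same idea with plain stand-in classes instead of Nani's types:

    class Base(object):
        pass

    class Sub(Base):
        pass

    KNOWN_TYPES = {Base}

    def find_base_type(obj):
        # walk the method resolution order of obj's class, most-derived first
        for base in type(obj).__mro__:
            if base in KNOWN_TYPES:
                return base
        return None

    print(find_base_type(Sub()))  # <class '__main__.Base'>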
244,036
christophercrouzet/nani
nani.py
_find_duplicates
def _find_duplicates(seq): """Find the duplicate elements from a sequence.""" seen = set() return [element for element in seq if seq.count(element) > 1 and element not in seen and seen.add(element) is None]
python
def _find_duplicates(seq): """Find the duplicate elements from a sequence.""" seen = set() return [element for element in seq if seq.count(element) > 1 and element not in seen and seen.add(element) is None]
[ "def", "_find_duplicates", "(", "seq", ")", ":", "seen", "=", "set", "(", ")", "return", "[", "element", "for", "element", "in", "seq", "if", "seq", ".", "count", "(", "element", ")", ">", "1", "and", "element", "not", "in", "seen", "and", "seen", ".", "add", "(", "element", ")", "is", "None", "]" ]
Find the duplicate elements from a sequence.
[ "Find", "the", "duplicate", "elements", "from", "a", "sequence", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1085-L1090
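_find_duplicates keeps the first occurrence of each repeated element while preserving order; copied out as a standalone snippet:

    def find_duplicates(seq):
        seen = set()
        # seen.add(...) returns None, so the last clause both records the
        # element and keeps the comprehension's condition truthy
        return [element for element in seq
                if seq.count(element) > 1
                and element not in seen
                and seen.add(element) is None]

    print(find_duplicates(['a', 'b', 'a', 'c', 'b', 'a']))  # ['a', 'b']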
244,037
christophercrouzet/nani
nani.py
_format_type
def _format_type(cls): """Format a type name for printing.""" if cls.__module__ == _BUILTIN_MODULE: return cls.__name__ else: return '%s.%s' % (cls.__module__, cls.__name__)
python
def _format_type(cls): """Format a type name for printing.""" if cls.__module__ == _BUILTIN_MODULE: return cls.__name__ else: return '%s.%s' % (cls.__module__, cls.__name__)
[ "def", "_format_type", "(", "cls", ")", ":", "if", "cls", ".", "__module__", "==", "_BUILTIN_MODULE", ":", "return", "cls", ".", "__name__", "else", ":", "return", "'%s.%s'", "%", "(", "cls", ".", "__module__", ",", "cls", ".", "__name__", ")" ]
Format a type name for printing.
[ "Format", "a", "type", "name", "for", "printing", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1093-L1098
244,038
christophercrouzet/nani
nani.py
_format_element
def _format_element(element, count, index, last_separator): """Format an element from a sequence. This only prepends a separator for the last element and wraps each element with single quotes. """ return ("%s'%s'" % (last_separator, element) if count > 1 and index == count - 1 else "'%s'" % (element,))
python
def _format_element(element, count, index, last_separator): """Format an element from a sequence. This only prepends a separator for the last element and wraps each element with single quotes. """ return ("%s'%s'" % (last_separator, element) if count > 1 and index == count - 1 else "'%s'" % (element,))
[ "def", "_format_element", "(", "element", ",", "count", ",", "index", ",", "last_separator", ")", ":", "return", "(", "\"%s'%s'\"", "%", "(", "last_separator", ",", "element", ")", "if", "count", ">", "1", "and", "index", "==", "count", "-", "1", "else", "\"'%s'\"", "%", "(", "element", ",", ")", ")" ]
Format an element from a sequence. This only prepends a separator for the last element and wraps each element with single quotes.
[ "Format", "an", "element", "from", "a", "sequence", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1101-L1109
244,039
christophercrouzet/nani
nani.py
_join_sequence
def _join_sequence(seq, last_separator=''): """Join a sequence into a string.""" count = len(seq) return ', '.join(_format_element(element, count, i, last_separator) for i, element in enumerate(seq))
python
def _join_sequence(seq, last_separator=''): """Join a sequence into a string.""" count = len(seq) return ', '.join(_format_element(element, count, i, last_separator) for i, element in enumerate(seq))
[ "def", "_join_sequence", "(", "seq", ",", "last_separator", "=", "''", ")", ":", "count", "=", "len", "(", "seq", ")", "return", "', '", ".", "join", "(", "_format_element", "(", "element", ",", "count", ",", "i", ",", "last_separator", ")", "for", "i", ",", "element", "in", "enumerate", "(", "seq", ")", ")" ]
Join a sequence into a string.
[ "Join", "a", "sequence", "into", "a", "string", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1112-L1116
244,040
christophercrouzet/nani
nani.py
_join_types
def _join_types(seq, last_separator=''): """Join class object names into a string.""" class_names = [_format_type(cls) for cls in seq] return _join_sequence(class_names, last_separator)
python
def _join_types(seq, last_separator=''): """Join class object names into a string.""" class_names = [_format_type(cls) for cls in seq] return _join_sequence(class_names, last_separator)
[ "def", "_join_types", "(", "seq", ",", "last_separator", "=", "''", ")", ":", "class_names", "=", "[", "_format_type", "(", "cls", ")", "for", "cls", "in", "seq", "]", "return", "_join_sequence", "(", "class_names", ",", "last_separator", ")" ]
Join class object names into a string.
[ "Join", "class", "object", "names", "into", "a", "string", "." ]
296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1
https://github.com/christophercrouzet/nani/blob/296ae50c0cdcfd3ed0cba23a4d2edea0d124bcb1/nani.py#L1119-L1122
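_format_element, _join_sequence and _join_types cooperate to build readable enumerations for messages; reusing the bodies shown in the records above as a standalone sketch:

    def format_element(element, count, index, last_separator):
        return ("%s'%s'" % (last_separator, element)
                if count > 1 and index == count - 1
                else "'%s'" % (element,))

    def join_sequence(seq, last_separator=''):
        count = len(seq)
        return ', '.join(format_element(element, count, i, last_separator)
                         for i, element in enumerate(seq))

    print(join_sequence(['foo', 'bar', 'baz'], last_separator='or '))
    # -> 'foo', 'bar', or 'baz'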
244,041
lambdalisue/maidenhair
src/maidenhair/loaders/base.py
slice_columns
def slice_columns(x, using=None): """ Slice a numpy array to make columns Parameters ---------- x : ndarray A numpy array instance using : list of integer or slice instance or None, optional A list of index or slice instance Returns ------- ndarray A list of numpy array columns sliced """ if using is None: using = range(0, len(x[0])) return [x[:,s] for s in using]
python
def slice_columns(x, using=None): """ Slice a numpy array to make columns Parameters ---------- x : ndarray A numpy array instance using : list of integer or slice instance or None, optional A list of index or slice instance Returns ------- ndarray A list of numpy array columns sliced """ if using is None: using = range(0, len(x[0])) return [x[:,s] for s in using]
[ "def", "slice_columns", "(", "x", ",", "using", "=", "None", ")", ":", "if", "using", "is", "None", ":", "using", "=", "range", "(", "0", ",", "len", "(", "x", "[", "0", "]", ")", ")", "return", "[", "x", "[", ":", ",", "s", "]", "for", "s", "in", "using", "]" ]
Slice a numpy array to make columns Parameters ---------- x : ndarray A numpy array instance using : list of integer or slice instance or None, optional A list of index or slice instance Returns ------- ndarray A list of numpy array columns sliced
[ "Slice", "a", "numpy", "array", "to", "make", "columns" ]
d5095c1087d1f4d71cc57410492151d2803a9f0d
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/loaders/base.py#L156-L175
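slice_columns accepts either plain indices or slice objects in `using`; a small standalone demonstration with NumPy:

    import numpy as np

    def slice_columns(x, using=None):
        if using is None:
            using = range(0, len(x[0]))
        return [x[:, s] for s in using]

    x = np.array([[1, 2, 3],
                  [4, 5, 6]])
    print(slice_columns(x))                    # three 1-D columns
    print(slice_columns(x, [0, slice(1, 3)]))  # first column, then columns 1-2 as a 2-D block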
244,042
lambdalisue/maidenhair
src/maidenhair/loaders/base.py
unite_dataset
def unite_dataset(dataset, basecolumn=0): """ Unite dataset into a single data Parameters ---------- dataset : list of ndarray A data list of a column list of a numpy arrays basecolumn : integer, optional An index of base column. All data will be trimmed based on the order of this column when the number of samples are different among the dataset Returns ------- list of numpy array A column list of a numpy array """ ndata = [None] * len(dataset[0]) for pdata in dataset: # select basecolumn bnx = ndata[basecolumn] bpx = pdata[basecolumn] if bnx is not None and bnx.ndim >= 2: bnx = bnx[:,-1] if bpx is not None and bpx.ndim >= 2: bpx = bpx[:,-1] # calculate min and max of this and final data if bnx is not None and len(bnx) != len(bpx): # the number of samples is different, so regulation is required xmin = max(np.min(bnx), np.min(bpx)) xmax = min(np.max(bnx), np.max(bpx)) # slice the data nindex = np.where((bnx>xmin) & (bnx<xmax)) pindex = np.where((bpx>xmin) & (bpx<xmax)) else: nindex = None pindex = None for i, (nx, px) in enumerate(itertools.izip(ndata, pdata)): if nindex: nx = nx[nindex] if pindex: px = px[pindex] ndata[i] = px if nx is None else np.c_[nx, px] return [ndata]
python
def unite_dataset(dataset, basecolumn=0): """ Unite dataset into a single data Parameters ---------- dataset : list of ndarray A data list of a column list of a numpy arrays basecolumn : integer, optional An index of base column. All data will be trimmed based on the order of this column when the number of samples are different among the dataset Returns ------- list of numpy array A column list of a numpy array """ ndata = [None] * len(dataset[0]) for pdata in dataset: # select basecolumn bnx = ndata[basecolumn] bpx = pdata[basecolumn] if bnx is not None and bnx.ndim >= 2: bnx = bnx[:,-1] if bpx is not None and bpx.ndim >= 2: bpx = bpx[:,-1] # calculate min and max of this and final data if bnx is not None and len(bnx) != len(bpx): # the number of samples is different, so regulation is required xmin = max(np.min(bnx), np.min(bpx)) xmax = min(np.max(bnx), np.max(bpx)) # slice the data nindex = np.where((bnx>xmin) & (bnx<xmax)) pindex = np.where((bpx>xmin) & (bpx<xmax)) else: nindex = None pindex = None for i, (nx, px) in enumerate(itertools.izip(ndata, pdata)): if nindex: nx = nx[nindex] if pindex: px = px[pindex] ndata[i] = px if nx is None else np.c_[nx, px] return [ndata]
[ "def", "unite_dataset", "(", "dataset", ",", "basecolumn", "=", "0", ")", ":", "ndata", "=", "[", "None", "]", "*", "len", "(", "dataset", "[", "0", "]", ")", "for", "pdata", "in", "dataset", ":", "# select basecolumn", "bnx", "=", "ndata", "[", "basecolumn", "]", "bpx", "=", "pdata", "[", "basecolumn", "]", "if", "bnx", "is", "not", "None", "and", "bnx", ".", "ndim", ">=", "2", ":", "bnx", "=", "bnx", "[", ":", ",", "-", "1", "]", "if", "bpx", "is", "not", "None", "and", "bpx", ".", "ndim", ">=", "2", ":", "bpx", "=", "bpx", "[", ":", ",", "-", "1", "]", "# calculate min and max of this and final data", "if", "bnx", "is", "not", "None", "and", "len", "(", "bnx", ")", "!=", "len", "(", "bpx", ")", ":", "# the number of samples is different, so regulation is required", "xmin", "=", "max", "(", "np", ".", "min", "(", "bnx", ")", ",", "np", ".", "min", "(", "bpx", ")", ")", "xmax", "=", "min", "(", "np", ".", "max", "(", "bnx", ")", ",", "np", ".", "max", "(", "bpx", ")", ")", "# slice the data", "nindex", "=", "np", ".", "where", "(", "(", "bnx", ">", "xmin", ")", "&", "(", "bnx", "<", "xmax", ")", ")", "pindex", "=", "np", ".", "where", "(", "(", "bpx", ">", "xmin", ")", "&", "(", "bpx", "<", "xmax", ")", ")", "else", ":", "nindex", "=", "None", "pindex", "=", "None", "for", "i", ",", "(", "nx", ",", "px", ")", "in", "enumerate", "(", "itertools", ".", "izip", "(", "ndata", ",", "pdata", ")", ")", ":", "if", "nindex", ":", "nx", "=", "nx", "[", "nindex", "]", "if", "pindex", ":", "px", "=", "px", "[", "pindex", "]", "ndata", "[", "i", "]", "=", "px", "if", "nx", "is", "None", "else", "np", ".", "c_", "[", "nx", ",", "px", "]", "return", "[", "ndata", "]" ]
Unite dataset into a single data Parameters ---------- dataset : list of ndarray A data list of a column list of a numpy arrays basecolumn : integer, optional An index of base column. All data will be trimmed based on the order of this column when the number of samples are different among the dataset Returns ------- list of numpy array A column list of a numpy array
[ "Unite", "dataset", "into", "a", "single", "data" ]
d5095c1087d1f4d71cc57410492151d2803a9f0d
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/loaders/base.py#L177-L222
244,043
lambdalisue/maidenhair
src/maidenhair/loaders/base.py
BaseLoader.load
def load(self, filename, using=None, parser=None, **kwargs): """ Load data from file using a specified parser. Return value will be separated or sliced into a column list Parameters ---------- filename : string A data file path using : list of integer, slice instance, or None, optional A list of index or slice instance used to slice data into column If it is not specified, :attr:`using` specified in constructor will be used instead. parser : instance or None, optional An instance or registered name of parser class. If it is not specified, :attr:`parser` specified in constructor will be used instead. Returns ------- ndarray A list of numpy array """ using = using or self.using parser = parser or self.parser if parser is None: raise AttributeError("A parser instance must be specified") # parse iterator with the specified parser data = parser.load(filename, **kwargs) # slice column by using return slice_columns(data, using)
python
def load(self, filename, using=None, parser=None, **kwargs): """ Load data from file using a specified parser. Return value will be separated or sliced into a column list Parameters ---------- filename : string A data file path using : list of integer, slice instance, or None, optional A list of index or slice instance used to slice data into column If it is not specified, :attr:`using` specified in constructor will be used instead. parser : instance or None, optional An instance or registered name of parser class. If it is not specified, :attr:`parser` specified in constructor will be used instead. Returns ------- ndarray A list of numpy array """ using = using or self.using parser = parser or self.parser if parser is None: raise AttributeError("A parser instance must be specified") # parse iterator with the specified parser data = parser.load(filename, **kwargs) # slice column by using return slice_columns(data, using)
[ "def", "load", "(", "self", ",", "filename", ",", "using", "=", "None", ",", "parser", "=", "None", ",", "*", "*", "kwargs", ")", ":", "using", "=", "using", "or", "self", ".", "using", "parser", "=", "parser", "or", "self", ".", "parser", "if", "parser", "is", "None", ":", "raise", "AttributeError", "(", "\"A parser instance must be specified\"", ")", "# parse iterator with the specified parser", "data", "=", "parser", ".", "load", "(", "filename", ",", "*", "*", "kwargs", ")", "# slice column by using", "return", "slice_columns", "(", "data", ",", "using", ")" ]
Load data from file using a specified parser. Return value will be separated or sliced into a column list Parameters ---------- filename : string A data file path using : list of integer, slice instance, or None, optional A list of index or slice instance used to slice data into column If it is not specified, :attr:`using` specified in constructor will be used instead. parser : instance or None, optional An instance or registered name of parser class. If it is not specified, :attr:`parser` specified in constructor will be used instead. Returns ------- ndarray A list of numpy array
[ "Load", "data", "from", "file", "using", "a", "specified", "parser", "." ]
d5095c1087d1f4d71cc57410492151d2803a9f0d
https://github.com/lambdalisue/maidenhair/blob/d5095c1087d1f4d71cc57410492151d2803a9f0d/src/maidenhair/loaders/base.py#L40-L72
244,044
lambdalisue/django-roughpages
src/roughpages/backends/auth.py
AuthTemplateFilenameBackend.prepare_filenames
def prepare_filenames(self, normalized_url, request): """ Prepare template filename list based on the user authenticated state If user is authenticated user, it use '_authenticated' as a suffix. Otherwise it use '_anonymous' as a suffix to produce the template filename list. The list include original filename at the end of the list. Args: normalized_url (str): A normalized url request (instance): An instance of HttpRequest Returns: list Examples: >>> from mock import MagicMock >>> request = MagicMock() >>> backend = AuthTemplateFilenameBackend() >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_authenticated.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_anonymous.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_authenticated.html', ... 'index.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_anonymous.html', ... 'index.html' ... ] """ filenames = [normalized_url] if request.user.is_authenticated(): filenames.insert(0, normalized_url + ".authenticated") else: filenames.insert(0, normalized_url + ".anonymous") return filenames
python
def prepare_filenames(self, normalized_url, request): """ Prepare template filename list based on the user authenticated state If user is authenticated user, it use '_authenticated' as a suffix. Otherwise it use '_anonymous' as a suffix to produce the template filename list. The list include original filename at the end of the list. Args: normalized_url (str): A normalized url request (instance): An instance of HttpRequest Returns: list Examples: >>> from mock import MagicMock >>> request = MagicMock() >>> backend = AuthTemplateFilenameBackend() >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_authenticated.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_anonymous.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_authenticated.html', ... 'index.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_anonymous.html', ... 'index.html' ... ] """ filenames = [normalized_url] if request.user.is_authenticated(): filenames.insert(0, normalized_url + ".authenticated") else: filenames.insert(0, normalized_url + ".anonymous") return filenames
[ "def", "prepare_filenames", "(", "self", ",", "normalized_url", ",", "request", ")", ":", "filenames", "=", "[", "normalized_url", "]", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "filenames", ".", "insert", "(", "0", ",", "normalized_url", "+", "\".authenticated\"", ")", "else", ":", "filenames", ".", "insert", "(", "0", ",", "normalized_url", "+", "\".anonymous\"", ")", "return", "filenames" ]
Prepare template filename list based on the user authenticated state If user is authenticated user, it use '_authenticated' as a suffix. Otherwise it use '_anonymous' as a suffix to produce the template filename list. The list include original filename at the end of the list. Args: normalized_url (str): A normalized url request (instance): An instance of HttpRequest Returns: list Examples: >>> from mock import MagicMock >>> request = MagicMock() >>> backend = AuthTemplateFilenameBackend() >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_authenticated.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('foo/bar/hogehoge', ... request) >>> assert filenames == [ ... 'foo/bar/hogehoge_anonymous.html', ... 'foo/bar/hogehoge.html' ... ] >>> request.user.is_authenticated.return_value = True >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_authenticated.html', ... 'index.html' ... ] >>> request.user.is_authenticated.return_value = False >>> filenames = backend.prepare_filenames('', ... request) >>> assert filenames == [ ... 'index_anonymous.html', ... 'index.html' ... ]
[ "Prepare", "template", "filename", "list", "based", "on", "the", "user", "authenticated", "state" ]
f6a2724ece729c5deced2c2546d172561ef785ec
https://github.com/lambdalisue/django-roughpages/blob/f6a2724ece729c5deced2c2546d172561ef785ec/src/roughpages/backends/auth.py#L16-L70
244,045
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
WSDLReader.loadFromStream
def loadFromStream(self, stream, name=None): """Return a WSDL instance loaded from a stream object.""" document = DOM.loadDocument(stream) wsdl = WSDL() if name: wsdl.location = name elif hasattr(stream, 'name'): wsdl.location = stream.name wsdl.load(document) return wsdl
python
def loadFromStream(self, stream, name=None): """Return a WSDL instance loaded from a stream object.""" document = DOM.loadDocument(stream) wsdl = WSDL() if name: wsdl.location = name elif hasattr(stream, 'name'): wsdl.location = stream.name wsdl.load(document) return wsdl
[ "def", "loadFromStream", "(", "self", ",", "stream", ",", "name", "=", "None", ")", ":", "document", "=", "DOM", ".", "loadDocument", "(", "stream", ")", "wsdl", "=", "WSDL", "(", ")", "if", "name", ":", "wsdl", ".", "location", "=", "name", "elif", "hasattr", "(", "stream", ",", "'name'", ")", ":", "wsdl", ".", "location", "=", "stream", ".", "name", "wsdl", ".", "load", "(", "document", ")", "return", "wsdl" ]
Return a WSDL instance loaded from a stream object.
[ "Return", "a", "WSDL", "instance", "loaded", "from", "a", "stream", "object", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L26-L35
244,046
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
WSDLReader.loadFromURL
def loadFromURL(self, url): """Return a WSDL instance loaded from the given url.""" document = DOM.loadFromURL(url) wsdl = WSDL() wsdl.location = url wsdl.load(document) return wsdl
python
def loadFromURL(self, url): """Return a WSDL instance loaded from the given url.""" document = DOM.loadFromURL(url) wsdl = WSDL() wsdl.location = url wsdl.load(document) return wsdl
[ "def", "loadFromURL", "(", "self", ",", "url", ")", ":", "document", "=", "DOM", ".", "loadFromURL", "(", "url", ")", "wsdl", "=", "WSDL", "(", ")", "wsdl", ".", "location", "=", "url", "wsdl", ".", "load", "(", "document", ")", "return", "wsdl" ]
Return a WSDL instance loaded from the given url.
[ "Return", "a", "WSDL", "instance", "loaded", "from", "the", "given", "url", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L37-L43
244,047
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
WSDLReader.loadFromFile
def loadFromFile(self, filename): """Return a WSDL instance loaded from the given file.""" file = open(filename, 'rb') try: wsdl = self.loadFromStream(file) finally: file.close() return wsdl
python
def loadFromFile(self, filename): """Return a WSDL instance loaded from the given file.""" file = open(filename, 'rb') try: wsdl = self.loadFromStream(file) finally: file.close() return wsdl
[ "def", "loadFromFile", "(", "self", ",", "filename", ")", ":", "file", "=", "open", "(", "filename", ",", "'rb'", ")", "try", ":", "wsdl", "=", "self", ".", "loadFromStream", "(", "file", ")", "finally", ":", "file", ".", "close", "(", ")", "return", "wsdl" ]
Return a WSDL instance loaded from the given file.
[ "Return", "a", "WSDL", "instance", "loaded", "from", "the", "given", "file", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L49-L56
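The three WSDLReader records above (loadFromStream, loadFromURL, loadFromFile) describe alternative entry points for parsing a WSDL document. A minimal usage sketch follows; the file name and URL are illustrative placeholders, and only the methods shown in these records are used.

from pyremotevbox.ZSI.wstools.WSDLTools import WSDLReader

reader = WSDLReader()

# loadFromFile opens the path in binary mode and closes it when done.
wsdl = reader.loadFromFile('service.wsdl')

# loadFromURL fetches and parses the document, recording the URL as wsdl.location.
wsdl = reader.loadFromURL('http://example.com/service?wsdl')

# loadFromStream accepts any file-like object; the optional name becomes wsdl.location.
with open('service.wsdl', 'rb') as fh:
    wsdl = reader.loadFromStream(fh, name='service.wsdl')

print(wsdl.location)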
244,048
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
WSDL.toDom
def toDom(self): """ Generate a DOM representation of the WSDL instance. Not dealing with generating XML Schema, thus the targetNamespace of all XML Schema elements or types used by WSDL message parts needs to be specified via import information items. """ namespaceURI = DOM.GetWSDLUri(self.version) self.document = DOM.createDocument(namespaceURI ,'wsdl:definitions') # Set up a couple prefixes for easy reading. child = DOM.getElement(self.document, None) child.setAttributeNS(None, 'targetNamespace', self.targetNamespace) child.setAttributeNS(XMLNS.BASE, 'xmlns:wsdl', namespaceURI) child.setAttributeNS(XMLNS.BASE, 'xmlns:xsd', 'http://www.w3.org/1999/XMLSchema') child.setAttributeNS(XMLNS.BASE, 'xmlns:soap', 'http://schemas.xmlsoap.org/wsdl/soap/') child.setAttributeNS(XMLNS.BASE, 'xmlns:tns', self.targetNamespace) if self.name: child.setAttributeNS(None, 'name', self.name) # wsdl:import for item in self.imports: item.toDom() # wsdl:message for item in self.messages: item.toDom() # wsdl:portType for item in self.portTypes: item.toDom() # wsdl:binding for item in self.bindings: item.toDom() # wsdl:service for item in self.services: item.toDom()
python
def toDom(self): """ Generate a DOM representation of the WSDL instance. Not dealing with generating XML Schema, thus the targetNamespace of all XML Schema elements or types used by WSDL message parts needs to be specified via import information items. """ namespaceURI = DOM.GetWSDLUri(self.version) self.document = DOM.createDocument(namespaceURI ,'wsdl:definitions') # Set up a couple prefixes for easy reading. child = DOM.getElement(self.document, None) child.setAttributeNS(None, 'targetNamespace', self.targetNamespace) child.setAttributeNS(XMLNS.BASE, 'xmlns:wsdl', namespaceURI) child.setAttributeNS(XMLNS.BASE, 'xmlns:xsd', 'http://www.w3.org/1999/XMLSchema') child.setAttributeNS(XMLNS.BASE, 'xmlns:soap', 'http://schemas.xmlsoap.org/wsdl/soap/') child.setAttributeNS(XMLNS.BASE, 'xmlns:tns', self.targetNamespace) if self.name: child.setAttributeNS(None, 'name', self.name) # wsdl:import for item in self.imports: item.toDom() # wsdl:message for item in self.messages: item.toDom() # wsdl:portType for item in self.portTypes: item.toDom() # wsdl:binding for item in self.bindings: item.toDom() # wsdl:service for item in self.services: item.toDom()
[ "def", "toDom", "(", "self", ")", ":", "namespaceURI", "=", "DOM", ".", "GetWSDLUri", "(", "self", ".", "version", ")", "self", ".", "document", "=", "DOM", ".", "createDocument", "(", "namespaceURI", ",", "'wsdl:definitions'", ")", "# Set up a couple prefixes for easy reading.", "child", "=", "DOM", ".", "getElement", "(", "self", ".", "document", ",", "None", ")", "child", ".", "setAttributeNS", "(", "None", ",", "'targetNamespace'", ",", "self", ".", "targetNamespace", ")", "child", ".", "setAttributeNS", "(", "XMLNS", ".", "BASE", ",", "'xmlns:wsdl'", ",", "namespaceURI", ")", "child", ".", "setAttributeNS", "(", "XMLNS", ".", "BASE", ",", "'xmlns:xsd'", ",", "'http://www.w3.org/1999/XMLSchema'", ")", "child", ".", "setAttributeNS", "(", "XMLNS", ".", "BASE", ",", "'xmlns:soap'", ",", "'http://schemas.xmlsoap.org/wsdl/soap/'", ")", "child", ".", "setAttributeNS", "(", "XMLNS", ".", "BASE", ",", "'xmlns:tns'", ",", "self", ".", "targetNamespace", ")", "if", "self", ".", "name", ":", "child", ".", "setAttributeNS", "(", "None", ",", "'name'", ",", "self", ".", "name", ")", "# wsdl:import", "for", "item", "in", "self", ".", "imports", ":", "item", ".", "toDom", "(", ")", "# wsdl:message", "for", "item", "in", "self", ".", "messages", ":", "item", ".", "toDom", "(", ")", "# wsdl:portType", "for", "item", "in", "self", ".", "portTypes", ":", "item", ".", "toDom", "(", ")", "# wsdl:binding", "for", "item", "in", "self", ".", "bindings", ":", "item", ".", "toDom", "(", ")", "# wsdl:service", "for", "item", "in", "self", ".", "services", ":", "item", ".", "toDom", "(", ")" ]
Generate a DOM representation of the WSDL instance. Not dealing with generating XML Schema, thus the targetNamespace of all XML Schema elements or types used by WSDL message parts needs to be specified via import information items.
[ "Generate", "a", "DOM", "representation", "of", "the", "WSDL", "instance", ".", "Not", "dealing", "with", "generating", "XML", "Schema", "thus", "the", "targetNamespace", "of", "all", "XML", "Schema", "elements", "or", "types", "used", "by", "WSDL", "message", "parts", "needs", "to", "be", "specified", "via", "import", "information", "items", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L133-L167
244,049
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
Element.getWSDL
def getWSDL(self): """Return the WSDL object that contains this information item.""" parent = self while 1: # skip any collections if isinstance(parent, WSDL): return parent try: parent = parent.parent() except: break return None
python
def getWSDL(self): """Return the WSDL object that contains this information item.""" parent = self while 1: # skip any collections if isinstance(parent, WSDL): return parent try: parent = parent.parent() except: break return None
[ "def", "getWSDL", "(", "self", ")", ":", "parent", "=", "self", "while", "1", ":", "# skip any collections", "if", "isinstance", "(", "parent", ",", "WSDL", ")", ":", "return", "parent", "try", ":", "parent", "=", "parent", ".", "parent", "(", ")", "except", ":", "break", "return", "None" ]
Return the WSDL object that contains this information item.
[ "Return", "the", "WSDL", "object", "that", "contains", "this", "information", "item", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L383-L393
244,050
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
Port.getBinding
def getBinding(self): """Return the Binding object that is referenced by this port.""" wsdl = self.getService().getWSDL() return wsdl.bindings[self.binding]
python
def getBinding(self): """Return the Binding object that is referenced by this port.""" wsdl = self.getService().getWSDL() return wsdl.bindings[self.binding]
[ "def", "getBinding", "(", "self", ")", ":", "wsdl", "=", "self", ".", "getService", "(", ")", ".", "getWSDL", "(", ")", "return", "wsdl", ".", "bindings", "[", "self", ".", "binding", "]" ]
Return the Binding object that is referenced by this port.
[ "Return", "the", "Binding", "object", "that", "is", "referenced", "by", "this", "port", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1097-L1100
244,051
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
Port.getPortType
def getPortType(self): """Return the PortType object that is referenced by this port.""" wsdl = self.getService().getWSDL() binding = wsdl.bindings[self.binding] return wsdl.portTypes[binding.type]
python
def getPortType(self): """Return the PortType object that is referenced by this port.""" wsdl = self.getService().getWSDL() binding = wsdl.bindings[self.binding] return wsdl.portTypes[binding.type]
[ "def", "getPortType", "(", "self", ")", ":", "wsdl", "=", "self", ".", "getService", "(", ")", ".", "getWSDL", "(", ")", "binding", "=", "wsdl", ".", "bindings", "[", "self", ".", "binding", "]", "return", "wsdl", ".", "portTypes", "[", "binding", ".", "type", "]" ]
Return the PortType object that is referenced by this port.
[ "Return", "the", "PortType", "object", "that", "is", "referenced", "by", "this", "port", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1102-L1106
244,052
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
Port.getAddressBinding
def getAddressBinding(self): """A convenience method to obtain the extension element used as the address binding for the port.""" for item in self.extensions: if isinstance(item, SoapAddressBinding) or \ isinstance(item, HttpAddressBinding): return item raise WSDLError( 'No address binding found in port.' )
python
def getAddressBinding(self): """A convenience method to obtain the extension element used as the address binding for the port.""" for item in self.extensions: if isinstance(item, SoapAddressBinding) or \ isinstance(item, HttpAddressBinding): return item raise WSDLError( 'No address binding found in port.' )
[ "def", "getAddressBinding", "(", "self", ")", ":", "for", "item", "in", "self", ".", "extensions", ":", "if", "isinstance", "(", "item", ",", "SoapAddressBinding", ")", "or", "isinstance", "(", "item", ",", "HttpAddressBinding", ")", ":", "return", "item", "raise", "WSDLError", "(", "'No address binding found in port.'", ")" ]
A convenience method to obtain the extension element used as the address binding for the port.
[ "A", "convenience", "method", "to", "obtain", "the", "extension", "element", "used", "as", "the", "address", "binding", "for", "the", "port", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1108-L1117
244,053
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
SOAPCallInfo.addInParameter
def addInParameter(self, name, type, namespace=None, element_type=0): """Add an input parameter description to the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.inparams.append(parameter) return parameter
python
def addInParameter(self, name, type, namespace=None, element_type=0): """Add an input parameter description to the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.inparams.append(parameter) return parameter
[ "def", "addInParameter", "(", "self", ",", "name", ",", "type", ",", "namespace", "=", "None", ",", "element_type", "=", "0", ")", ":", "parameter", "=", "ParameterInfo", "(", "name", ",", "type", ",", "namespace", ",", "element_type", ")", "self", ".", "inparams", ".", "append", "(", "parameter", ")", "return", "parameter" ]
Add an input parameter description to the call info.
[ "Add", "an", "input", "parameter", "description", "to", "the", "call", "info", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1467-L1471
244,054
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
SOAPCallInfo.addOutParameter
def addOutParameter(self, name, type, namespace=None, element_type=0): """Add an output parameter description to the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.outparams.append(parameter) return parameter
python
def addOutParameter(self, name, type, namespace=None, element_type=0): """Add an output parameter description to the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.outparams.append(parameter) return parameter
[ "def", "addOutParameter", "(", "self", ",", "name", ",", "type", ",", "namespace", "=", "None", ",", "element_type", "=", "0", ")", ":", "parameter", "=", "ParameterInfo", "(", "name", ",", "type", ",", "namespace", ",", "element_type", ")", "self", ".", "outparams", ".", "append", "(", "parameter", ")", "return", "parameter" ]
Add an output parameter description to the call info.
[ "Add", "an", "output", "parameter", "description", "to", "the", "call", "info", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1473-L1477
244,055
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
SOAPCallInfo.setReturnParameter
def setReturnParameter(self, name, type, namespace=None, element_type=0): """Set the return parameter description for the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.retval = parameter return parameter
python
def setReturnParameter(self, name, type, namespace=None, element_type=0): """Set the return parameter description for the call info.""" parameter = ParameterInfo(name, type, namespace, element_type) self.retval = parameter return parameter
[ "def", "setReturnParameter", "(", "self", ",", "name", ",", "type", ",", "namespace", "=", "None", ",", "element_type", "=", "0", ")", ":", "parameter", "=", "ParameterInfo", "(", "name", ",", "type", ",", "namespace", ",", "element_type", ")", "self", ".", "retval", "=", "parameter", "return", "parameter" ]
Set the return parameter description for the call info.
[ "Set", "the", "return", "parameter", "description", "for", "the", "call", "info", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1479-L1483
244,056
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
SOAPCallInfo.addInHeaderInfo
def addInHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an input SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 self.inheaders.append(headerinfo) return headerinfo
python
def addInHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an input SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 self.inheaders.append(headerinfo) return headerinfo
[ "def", "addInHeaderInfo", "(", "self", ",", "name", ",", "type", ",", "namespace", ",", "element_type", "=", "0", ",", "mustUnderstand", "=", "0", ")", ":", "headerinfo", "=", "HeaderInfo", "(", "name", ",", "type", ",", "namespace", ",", "element_type", ")", "if", "mustUnderstand", ":", "headerinfo", ".", "mustUnderstand", "=", "1", "self", ".", "inheaders", ".", "append", "(", "headerinfo", ")", "return", "headerinfo" ]
Add an input SOAP header description to the call info.
[ "Add", "an", "input", "SOAP", "header", "description", "to", "the", "call", "info", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1485-L1492
244,057
rameshg87/pyremotevbox
pyremotevbox/ZSI/wstools/WSDLTools.py
SOAPCallInfo.addOutHeaderInfo
def addOutHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an output SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 self.outheaders.append(headerinfo) return headerinfo
python
def addOutHeaderInfo(self, name, type, namespace, element_type=0, mustUnderstand=0): """Add an output SOAP header description to the call info.""" headerinfo = HeaderInfo(name, type, namespace, element_type) if mustUnderstand: headerinfo.mustUnderstand = 1 self.outheaders.append(headerinfo) return headerinfo
[ "def", "addOutHeaderInfo", "(", "self", ",", "name", ",", "type", ",", "namespace", ",", "element_type", "=", "0", ",", "mustUnderstand", "=", "0", ")", ":", "headerinfo", "=", "HeaderInfo", "(", "name", ",", "type", ",", "namespace", ",", "element_type", ")", "if", "mustUnderstand", ":", "headerinfo", ".", "mustUnderstand", "=", "1", "self", ".", "outheaders", ".", "append", "(", "headerinfo", ")", "return", "headerinfo" ]
Add an output SOAP header description to the call info.
[ "Add", "an", "output", "SOAP", "header", "description", "to", "the", "call", "info", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/wstools/WSDLTools.py#L1494-L1501
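The preceding SOAPCallInfo records describe how parameter and header metadata is accumulated on a call description. The sketch below strings them together; the SOAPCallInfo constructor is not part of these records, so the single method-name argument is an assumption, and the parameter names and namespaces are placeholders.

from pyremotevbox.ZSI.wstools.WSDLTools import SOAPCallInfo

info = SOAPCallInfo('GetQuote')   # assumed constructor argument (method name)

# Input/output parameters go through addInParameter/addOutParameter,
# and the return value through setReturnParameter.
info.addInParameter('symbol', 'string', namespace='http://example.com/types')
info.addOutParameter('price', 'float', namespace='http://example.com/types')
info.setReturnParameter('price', 'float')

# Header descriptions; mustUnderstand=1 flags the header as mandatory.
info.addInHeaderInfo('Auth', 'AuthToken', 'http://example.com/types', mustUnderstand=1)

print(len(info.inparams), len(info.outparams), len(info.inheaders))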
244,058
padfoot27/merlin
venv/lib/python2.7/site-packages/setuptools/dist.py
check_requirements
def check_requirements(dist, attr, value): """Verify that install_requires is a valid requirements list""" try: list(pkg_resources.parse_requirements(value)) except (TypeError,ValueError): raise DistutilsSetupError( "%r must be a string or list of strings " "containing valid project/version requirement specifiers" % (attr,) )
python
def check_requirements(dist, attr, value): """Verify that install_requires is a valid requirements list""" try: list(pkg_resources.parse_requirements(value)) except (TypeError,ValueError): raise DistutilsSetupError( "%r must be a string or list of strings " "containing valid project/version requirement specifiers" % (attr,) )
[ "def", "check_requirements", "(", "dist", ",", "attr", ",", "value", ")", ":", "try", ":", "list", "(", "pkg_resources", ".", "parse_requirements", "(", "value", ")", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "raise", "DistutilsSetupError", "(", "\"%r must be a string or list of strings \"", "\"containing valid project/version requirement specifiers\"", "%", "(", "attr", ",", ")", ")" ]
Verify that install_requires is a valid requirements list
[ "Verify", "that", "install_requires", "is", "a", "valid", "requirements", "list" ]
c317505c5eca0e774fcf8b8c7f08801479a5099a
https://github.com/padfoot27/merlin/blob/c317505c5eca0e774fcf8b8c7f08801479a5099a/venv/lib/python2.7/site-packages/setuptools/dist.py#L121-L129
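A rough illustration of how the check_requirements validator above behaves; the dist argument is unused by the body shown, so None is passed purely for demonstration, and the import path follows the recorded module (setuptools.dist).

from distutils.errors import DistutilsSetupError
from setuptools.dist import check_requirements

# A list of valid requirement specifiers parses cleanly.
check_requirements(None, 'install_requires', ['requests>=2.0', 'six'])

# Anything that is not a string or iterable of requirement strings is rejected.
try:
    check_requirements(None, 'install_requires', 42)
except DistutilsSetupError as exc:
    print(exc)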
244,059
pudo-attic/loadkit
loadkit/types/table.py
Table.store
def store(self): """ Create a context manager to store records in the cleaned table. """ output = tempfile.NamedTemporaryFile(suffix='.json') try: def write(o): line = json.dumps(o, default=json_default) return output.write(line + '\n') yield write output.seek(0) log.info("Uploading generated table (%s)...", self._obj) self.save_file(output.name, destructive=True) finally: try: output.close() except: pass
python
def store(self): """ Create a context manager to store records in the cleaned table. """ output = tempfile.NamedTemporaryFile(suffix='.json') try: def write(o): line = json.dumps(o, default=json_default) return output.write(line + '\n') yield write output.seek(0) log.info("Uploading generated table (%s)...", self._obj) self.save_file(output.name, destructive=True) finally: try: output.close() except: pass
[ "def", "store", "(", "self", ")", ":", "output", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.json'", ")", "try", ":", "def", "write", "(", "o", ")", ":", "line", "=", "json", ".", "dumps", "(", "o", ",", "default", "=", "json_default", ")", "return", "output", ".", "write", "(", "line", "+", "'\\n'", ")", "yield", "write", "output", ".", "seek", "(", "0", ")", "log", ".", "info", "(", "\"Uploading generated table (%s)...\"", ",", "self", ".", "_obj", ")", "self", ".", "save_file", "(", "output", ".", "name", ",", "destructive", "=", "True", ")", "finally", ":", "try", ":", "output", ".", "close", "(", ")", "except", ":", "pass" ]
Create a context manager to store records in the cleaned table.
[ "Create", "a", "context", "manager", "to", "store", "records", "in", "the", "cleaned", "table", "." ]
1fb17e69e2ffaf3dac4f40b574c3b7afb2198b7c
https://github.com/pudo-attic/loadkit/blob/1fb17e69e2ffaf3dac4f40b574c3b7afb2198b7c/loadkit/types/table.py#L21-L40
244,060
pudo-attic/loadkit
loadkit/types/table.py
Table.records
def records(self): """ Get each record that has been stored in the table. """ output = tempfile.NamedTemporaryFile(suffix='.json') try: log.info("Loading table from (%s)...", self._obj) shutil.copyfileobj(self.fh(), output) output.seek(0) for line in output.file: yield json.loads(line, object_hook=json_hook) finally: try: output.close() except: pass
python
def records(self): """ Get each record that has been stored in the table. """ output = tempfile.NamedTemporaryFile(suffix='.json') try: log.info("Loading table from (%s)...", self._obj) shutil.copyfileobj(self.fh(), output) output.seek(0) for line in output.file: yield json.loads(line, object_hook=json_hook) finally: try: output.close() except: pass
[ "def", "records", "(", "self", ")", ":", "output", "=", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.json'", ")", "try", ":", "log", ".", "info", "(", "\"Loading table from (%s)...\"", ",", "self", ".", "_obj", ")", "shutil", ".", "copyfileobj", "(", "self", ".", "fh", "(", ")", ",", "output", ")", "output", ".", "seek", "(", "0", ")", "for", "line", "in", "output", ".", "file", ":", "yield", "json", ".", "loads", "(", "line", ",", "object_hook", "=", "json_hook", ")", "finally", ":", "try", ":", "output", ".", "close", "(", ")", "except", ":", "pass" ]
Get each record that has been stored in the table.
[ "Get", "each", "record", "that", "has", "been", "stored", "in", "the", "table", "." ]
1fb17e69e2ffaf3dac4f40b574c3b7afb2198b7c
https://github.com/pudo-attic/loadkit/blob/1fb17e69e2ffaf3dac4f40b574c3b7afb2198b7c/loadkit/types/table.py#L42-L57
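Taken together, the two Table records above describe a write/read round trip: store() is documented as a context manager that yields a write callable serialising one record per JSON line, and records() streams the stored lines back as dicts. The sketch below assumes `table` is an already-constructed loadkit Table instance, since the constructor is not shown in these records.

# `table` is assumed to be an existing loadkit Table instance.
with table.store() as write:
    write({'name': 'alice', 'age': 42})
    write({'name': 'bob', 'age': 7})

# Read the uploaded JSON lines back as dicts.
for row in table.records():
    print(row['name'], row['age'])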
244,061
radjkarl/fancyTools
fancytools/pystructure/GetCallablesInPackage.py
GetCallablesInPackage._cleanRecursive
def _cleanRecursive(self, subSelf): """ Delete all NestedOrderedDict that haven't any entries. """ for key, item in list(subSelf.items()): if self.isNestedDict(item): if not item: subSelf.pop(key) else: self._cleanRecursive(item)
python
def _cleanRecursive(self, subSelf): """ Delete all NestedOrderedDict that haven't any entries. """ for key, item in list(subSelf.items()): if self.isNestedDict(item): if not item: subSelf.pop(key) else: self._cleanRecursive(item)
[ "def", "_cleanRecursive", "(", "self", ",", "subSelf", ")", ":", "for", "key", ",", "item", "in", "list", "(", "subSelf", ".", "items", "(", ")", ")", ":", "if", "self", ".", "isNestedDict", "(", "item", ")", ":", "if", "not", "item", ":", "subSelf", ".", "pop", "(", "key", ")", "else", ":", "self", ".", "_cleanRecursive", "(", "item", ")" ]
Delete all NestedOrderedDict that haven't any entries.
[ "Delete", "all", "NestedOrderedDict", "that", "haven", "t", "any", "entries", "." ]
4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b
https://github.com/radjkarl/fancyTools/blob/4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b/fancytools/pystructure/GetCallablesInPackage.py#L67-L76
244,062
radjkarl/fancyTools
fancytools/pystructure/GetCallablesInPackage.py
GetCallablesInPackage.belongsToModule
def belongsToModule(obj, module): """Returns True if an object belongs to a module.""" return obj.__module__ == module.__name__ or obj.__module__.startswith( module.__name__)
python
def belongsToModule(obj, module): """Returns True if an object belongs to a module.""" return obj.__module__ == module.__name__ or obj.__module__.startswith( module.__name__)
[ "def", "belongsToModule", "(", "obj", ",", "module", ")", ":", "return", "obj", ".", "__module__", "==", "module", ".", "__name__", "or", "obj", ".", "__module__", ".", "startswith", "(", "module", ".", "__name__", ")" ]
Returns True if an object belongs to a module.
[ "Returns", "True", "is", "an", "object", "belongs", "to", "a", "module", "." ]
4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b
https://github.com/radjkarl/fancyTools/blob/4c4d961003dc4ed6e46429a0c24f7e2bb52caa8b/fancytools/pystructure/GetCallablesInPackage.py#L125-L128

244,063
erwan-lemonnier/dynamodb-object-store
dynadbobjectstore.py
ObjectStore.create_table
def create_table(self): """Create the DynamoDB table used by this ObjectStore, only if it does not already exists. """ all_tables = self.aws_conn.list_tables()['TableNames'] if self.table_name in all_tables: log.info("Table %s already exists" % self.table_name) else: log.info("Table %s does not exist: creating it" % self.table_name) self.table = Table.create( self.table_name, schema=[ HashKey('key') ], throughput={ 'read': 10, 'write': 10, }, connection=self.aws_conn, )
python
def create_table(self): """Create the DynamoDB table used by this ObjectStore, only if it does not already exists. """ all_tables = self.aws_conn.list_tables()['TableNames'] if self.table_name in all_tables: log.info("Table %s already exists" % self.table_name) else: log.info("Table %s does not exist: creating it" % self.table_name) self.table = Table.create( self.table_name, schema=[ HashKey('key') ], throughput={ 'read': 10, 'write': 10, }, connection=self.aws_conn, )
[ "def", "create_table", "(", "self", ")", ":", "all_tables", "=", "self", ".", "aws_conn", ".", "list_tables", "(", ")", "[", "'TableNames'", "]", "if", "self", ".", "table_name", "in", "all_tables", ":", "log", ".", "info", "(", "\"Table %s already exists\"", "%", "self", ".", "table_name", ")", "else", ":", "log", ".", "info", "(", "\"Table %s does not exist: creating it\"", "%", "self", ".", "table_name", ")", "self", ".", "table", "=", "Table", ".", "create", "(", "self", ".", "table_name", ",", "schema", "=", "[", "HashKey", "(", "'key'", ")", "]", ",", "throughput", "=", "{", "'read'", ":", "10", ",", "'write'", ":", "10", ",", "}", ",", "connection", "=", "self", ".", "aws_conn", ",", ")" ]
Create the DynamoDB table used by this ObjectStore, only if it does not already exists.
[ "Create", "the", "DynamoDB", "table", "used", "by", "this", "ObjectStore", "only", "if", "it", "does", "not", "already", "exists", "." ]
fd0eee1912bc9c2139541a41928ee08efb4270f8
https://github.com/erwan-lemonnier/dynamodb-object-store/blob/fd0eee1912bc9c2139541a41928ee08efb4270f8/dynadbobjectstore.py#L28-L50
244,064
erwan-lemonnier/dynamodb-object-store
dynadbobjectstore.py
ObjectStore.put
def put(self, key, value, overwrite=True): """Marshall the python object given as 'value' into a string, using the to_string marshalling method passed in the constructor, and store it in the DynamoDB table under key 'key'. """ self._get_table() s = self.to_string(value) log.debug("Storing in key '%s' the object: '%s'" % (key, s)) self.table.put_item( data={ 'key': key, 'value': s, }, overwrite=overwrite )
python
def put(self, key, value, overwrite=True): """Marshall the python object given as 'value' into a string, using the to_string marshalling method passed in the constructor, and store it in the DynamoDB table under key 'key'. """ self._get_table() s = self.to_string(value) log.debug("Storing in key '%s' the object: '%s'" % (key, s)) self.table.put_item( data={ 'key': key, 'value': s, }, overwrite=overwrite )
[ "def", "put", "(", "self", ",", "key", ",", "value", ",", "overwrite", "=", "True", ")", ":", "self", ".", "_get_table", "(", ")", "s", "=", "self", ".", "to_string", "(", "value", ")", "log", ".", "debug", "(", "\"Storing in key '%s' the object: '%s'\"", "%", "(", "key", ",", "s", ")", ")", "self", ".", "table", ".", "put_item", "(", "data", "=", "{", "'key'", ":", "key", ",", "'value'", ":", "s", ",", "}", ",", "overwrite", "=", "overwrite", ")" ]
Marshall the python object given as 'value' into a string, using the to_string marshalling method passed in the constructor, and store it in the DynamoDB table under key 'key'.
[ "Marshall", "the", "python", "object", "given", "as", "value", "into", "a", "string", "using", "the", "to_string", "marshalling", "method", "passed", "in", "the", "constructor", "and", "store", "it", "in", "the", "DynamoDB", "table", "under", "key", "key", "." ]
fd0eee1912bc9c2139541a41928ee08efb4270f8
https://github.com/erwan-lemonnier/dynamodb-object-store/blob/fd0eee1912bc9c2139541a41928ee08efb4270f8/dynadbobjectstore.py#L56-L70
244,065
erwan-lemonnier/dynamodb-object-store
dynadbobjectstore.py
ObjectStore.get
def get(self, key): """Get the string representation of the object stored in DynamoDB under this key, convert it back to an object using the 'from_string' unmarshalling method passed in the constructor and return the object. Return None if no object found. """ self._get_table() s = self.table.get_item(key=key) log.debug("Retrieved from key '%s' the object: '%s'" % (key, s['value'])) return self.from_string(s['value'])
python
def get(self, key): """Get the string representation of the object stored in DynamoDB under this key, convert it back to an object using the 'from_string' unmarshalling method passed in the constructor and return the object. Return None if no object found. """ self._get_table() s = self.table.get_item(key=key) log.debug("Retrieved from key '%s' the object: '%s'" % (key, s['value'])) return self.from_string(s['value'])
[ "def", "get", "(", "self", ",", "key", ")", ":", "self", ".", "_get_table", "(", ")", "s", "=", "self", ".", "table", ".", "get_item", "(", "key", "=", "key", ")", "log", ".", "debug", "(", "\"Retrieved from key '%s' the object: '%s'\"", "%", "(", "key", ",", "s", "[", "'value'", "]", ")", ")", "return", "self", ".", "from_string", "(", "s", "[", "'value'", "]", ")" ]
Get the string representation of the object stored in DynamoDB under this key, convert it back to an object using the 'from_string' unmarshalling method passed in the constructor and return the object. Return None if no object found.
[ "Get", "the", "string", "representation", "of", "the", "object", "stored", "in", "DynamoDB", "under", "this", "key", "convert", "it", "back", "to", "an", "object", "using", "the", "from_string", "unmarshalling", "method", "passed", "in", "the", "constructor", "and", "return", "the", "object", ".", "Return", "None", "if", "no", "object", "found", "." ]
fd0eee1912bc9c2139541a41928ee08efb4270f8
https://github.com/erwan-lemonnier/dynamodb-object-store/blob/fd0eee1912bc9c2139541a41928ee08efb4270f8/dynadbobjectstore.py#L72-L80
244,066
erwan-lemonnier/dynamodb-object-store
dynadbobjectstore.py
ObjectStore.delete
def delete(self, key): """If this key exists, delete it""" self._get_table() self.table.delete_item(key=key) log.debug("Deleted item at key '%s'" % (key))
python
def delete(self, key): """If this key exists, delete it""" self._get_table() self.table.delete_item(key=key) log.debug("Deleted item at key '%s'" % (key))
[ "def", "delete", "(", "self", ",", "key", ")", ":", "self", ".", "_get_table", "(", ")", "self", ".", "table", ".", "delete_item", "(", "key", "=", "key", ")", "log", ".", "debug", "(", "\"Deleted item at key '%s'\"", "%", "(", "key", ")", ")" ]
If this key exists, delete it
[ "If", "this", "key", "exists", "delete", "it" ]
fd0eee1912bc9c2139541a41928ee08efb4270f8
https://github.com/erwan-lemonnier/dynamodb-object-store/blob/fd0eee1912bc9c2139541a41928ee08efb4270f8/dynadbobjectstore.py#L82-L86
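The four ObjectStore records above (create_table, put, get, delete) describe a simple key/value layer over DynamoDB with pluggable marshalling. The constructor is not part of these records, so the argument names below (aws_conn, table_name, to_string, from_string) are guesses inferred from the attributes the methods use; the boto connection setup is likewise an assumption.

import json
from boto.dynamodb2.layer1 import DynamoDBConnection
from dynadbobjectstore import ObjectStore

conn = DynamoDBConnection()   # assumes AWS credentials are already configured

store = ObjectStore(
    aws_conn=conn,            # hypothetical constructor arguments, inferred
    table_name='my-objects',  # from the self.aws_conn / self.table_name attributes above
    to_string=json.dumps,     # marshal python object -> string
    from_string=json.loads,   # unmarshal string -> python object
)

store.create_table()                      # only creates the table if it is missing
store.put('user:1', {'name': 'alice'})    # stored as a JSON string under key 'user:1'
print(store.get('user:1'))                # -> {'name': 'alice'}
store.delete('user:1')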
244,067
yunojuno/python-env-utils
env_utils/utils.py
_get_env
def _get_env(key, default=None, coerce=lambda x: x, required=False): """ Return env var coerced into a type other than string. This function extends the standard os.getenv function to enable the coercion of values into data types other than string (all env vars are strings by default). Args: key: string, the name of the env var to look up Kwargs: default: the default value to return if the env var does not exist. NB the default value is **not** coerced, and is assumed to be of the correct type. coerce: a function that is used to coerce the value returned into another type required: bool, if True, then a RequiredSettingMissing error is raised if the env var does not exist. Returns the env var, passed through the coerce function """ try: value = os.environ[key] except KeyError: if required is True: raise RequiredSettingMissing(key) else: return default try: return coerce(value) except Exception: raise CoercianError(key, value, coerce)
python
def _get_env(key, default=None, coerce=lambda x: x, required=False): """ Return env var coerced into a type other than string. This function extends the standard os.getenv function to enable the coercion of values into data types other than string (all env vars are strings by default). Args: key: string, the name of the env var to look up Kwargs: default: the default value to return if the env var does not exist. NB the default value is **not** coerced, and is assumed to be of the correct type. coerce: a function that is used to coerce the value returned into another type required: bool, if True, then a RequiredSettingMissing error is raised if the env var does not exist. Returns the env var, passed through the coerce function """ try: value = os.environ[key] except KeyError: if required is True: raise RequiredSettingMissing(key) else: return default try: return coerce(value) except Exception: raise CoercianError(key, value, coerce)
[ "def", "_get_env", "(", "key", ",", "default", "=", "None", ",", "coerce", "=", "lambda", "x", ":", "x", ",", "required", "=", "False", ")", ":", "try", ":", "value", "=", "os", ".", "environ", "[", "key", "]", "except", "KeyError", ":", "if", "required", "is", "True", ":", "raise", "RequiredSettingMissing", "(", "key", ")", "else", ":", "return", "default", "try", ":", "return", "coerce", "(", "value", ")", "except", "Exception", ":", "raise", "CoercianError", "(", "key", ",", "value", ",", "coerce", ")" ]
Return env var coerced into a type other than string. This function extends the standard os.getenv function to enable the coercion of values into data types other than string (all env vars are strings by default). Args: key: string, the name of the env var to look up Kwargs: default: the default value to return if the env var does not exist. NB the default value is **not** coerced, and is assumed to be of the correct type. coerce: a function that is used to coerce the value returned into another type required: bool, if True, then a RequiredSettingMissing error is raised if the env var does not exist. Returns the env var, passed through the coerce function
[ "Return", "env", "var", "coerced", "into", "a", "type", "other", "than", "string", "." ]
7f3b5635f93322759856644901221955908e7e99
https://github.com/yunojuno/python-env-utils/blob/7f3b5635f93322759856644901221955908e7e99/env_utils/utils.py#L27-L60
244,068
yunojuno/python-env-utils
env_utils/utils.py
get_env
def get_env(key, *default, **kwargs): """ Return env var. This is the parent function of all other get_foo functions, and is responsible for unpacking args/kwargs into the values that _get_env expects (it is the root function that actually interacts with environ). Args: key: string, the env var name to look up. default: (optional) the value to use if the env var does not exist. If this value is not supplied, then the env var is considered to be required, and a RequiredSettingMissing error will be raised if it does not exist. Kwargs: coerce: a func that may be supplied to coerce the value into something else. This is used by the default get_foo functions to cast strings to builtin types, but could be a function that returns a custom class. Returns the env var, coerced if required, and a default if supplied. """ assert len(default) in (0, 1), "Too many args supplied." func = kwargs.get('coerce', lambda x: x) required = (len(default) == 0) default = default[0] if not required else None return _get_env(key, default=default, coerce=func, required=required)
python
def get_env(key, *default, **kwargs): """ Return env var. This is the parent function of all other get_foo functions, and is responsible for unpacking args/kwargs into the values that _get_env expects (it is the root function that actually interacts with environ). Args: key: string, the env var name to look up. default: (optional) the value to use if the env var does not exist. If this value is not supplied, then the env var is considered to be required, and a RequiredSettingMissing error will be raised if it does not exist. Kwargs: coerce: a func that may be supplied to coerce the value into something else. This is used by the default get_foo functions to cast strings to builtin types, but could be a function that returns a custom class. Returns the env var, coerced if required, and a default if supplied. """ assert len(default) in (0, 1), "Too many args supplied." func = kwargs.get('coerce', lambda x: x) required = (len(default) == 0) default = default[0] if not required else None return _get_env(key, default=default, coerce=func, required=required)
[ "def", "get_env", "(", "key", ",", "*", "default", ",", "*", "*", "kwargs", ")", ":", "assert", "len", "(", "default", ")", "in", "(", "0", ",", "1", ")", ",", "\"Too many args supplied.\"", "func", "=", "kwargs", ".", "get", "(", "'coerce'", ",", "lambda", "x", ":", "x", ")", "required", "=", "(", "len", "(", "default", ")", "==", "0", ")", "default", "=", "default", "[", "0", "]", "if", "not", "required", "else", "None", "return", "_get_env", "(", "key", ",", "default", "=", "default", ",", "coerce", "=", "func", ",", "required", "=", "required", ")" ]
Return env var. This is the parent function of all other get_foo functions, and is responsible for unpacking args/kwargs into the values that _get_env expects (it is the root function that actually interacts with environ). Args: key: string, the env var name to look up. default: (optional) the value to use if the env var does not exist. If this value is not supplied, then the env var is considered to be required, and a RequiredSettingMissing error will be raised if it does not exist. Kwargs: coerce: a func that may be supplied to coerce the value into something else. This is used by the default get_foo functions to cast strings to builtin types, but could be a function that returns a custom class. Returns the env var, coerced if required, and a default if supplied.
[ "Return", "env", "var", "." ]
7f3b5635f93322759856644901221955908e7e99
https://github.com/yunojuno/python-env-utils/blob/7f3b5635f93322759856644901221955908e7e99/env_utils/utils.py#L94-L123
244,069
yunojuno/python-env-utils
env_utils/utils.py
get_list
def get_list(key, *default, **kwargs): """Return env var as a list.""" separator = kwargs.get('separator', ' ') return get_env(key, *default, coerce=lambda x: x.split(separator))
python
def get_list(key, *default, **kwargs): """Return env var as a list.""" separator = kwargs.get('separator', ' ') return get_env(key, *default, coerce=lambda x: x.split(separator))
[ "def", "get_list", "(", "key", ",", "*", "default", ",", "*", "*", "kwargs", ")", ":", "separator", "=", "kwargs", ".", "get", "(", "'separator'", ",", "' '", ")", "return", "get_env", "(", "key", ",", "*", "default", ",", "coerce", "=", "lambda", "x", ":", "x", ".", "split", "(", "separator", ")", ")" ]
Return env var as a list.
[ "Return", "env", "var", "as", "a", "list", "." ]
7f3b5635f93322759856644901221955908e7e99
https://github.com/yunojuno/python-env-utils/blob/7f3b5635f93322759856644901221955908e7e99/env_utils/utils.py#L146-L149
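The get_env and get_list records above define the lookup, default, coercion and required-variable behaviour. A short usage sketch follows, importing from the module path recorded here (env_utils/utils.py); the environment variable names are placeholders.

import os
from env_utils.utils import get_env, get_list

os.environ['MAX_RETRIES'] = '5'
os.environ['ALLOWED_HOSTS'] = 'a.example.com,b.example.com'

retries = get_env('MAX_RETRIES', 3, coerce=int)        # set -> int('5') == 5
timeout = get_env('TIMEOUT_SECONDS', 30, coerce=int)   # unset -> default 30 (defaults are not coerced)

# With no default the variable is required; a missing value raises RequiredSettingMissing.
# secret = get_env('SECRET_KEY')

hosts = get_list('ALLOWED_HOSTS', [], separator=',')   # -> ['a.example.com', 'b.example.com']

print(retries, timeout, hosts)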
244,070
dossier/dossier.models
dossier/models/etl/ads.py
row_to_content_obj
def row_to_content_obj(key_row): '''Returns ``FeatureCollection`` given an HBase artifact row. Note that the FC returned has a Unicode feature ``artifact_id`` set to the row's key. ''' key, row = key_row cid = mk_content_id(key.encode('utf-8')) response = row.get('response', {}) other_bows = defaultdict(StringCounter) for attr, val in row.get('indices', []): other_bows[attr][val] += 1 try: artifact_id = key if isinstance(artifact_id, str): artifact_id = unicode(artifact_id, 'utf-8') fc = html_to_fc( response.get('body', ''), url=row.get('url'), timestamp=row.get('timestamp'), other_features=dict(other_bows, **{'artifact_id': artifact_id})) except: fc = None print('Could not create FC for %s:' % cid, file=sys.stderr) print(traceback.format_exc(), file=sys.stderr) return cid, fc
python
def row_to_content_obj(key_row): '''Returns ``FeatureCollection`` given an HBase artifact row. Note that the FC returned has a Unicode feature ``artifact_id`` set to the row's key. ''' key, row = key_row cid = mk_content_id(key.encode('utf-8')) response = row.get('response', {}) other_bows = defaultdict(StringCounter) for attr, val in row.get('indices', []): other_bows[attr][val] += 1 try: artifact_id = key if isinstance(artifact_id, str): artifact_id = unicode(artifact_id, 'utf-8') fc = html_to_fc( response.get('body', ''), url=row.get('url'), timestamp=row.get('timestamp'), other_features=dict(other_bows, **{'artifact_id': artifact_id})) except: fc = None print('Could not create FC for %s:' % cid, file=sys.stderr) print(traceback.format_exc(), file=sys.stderr) return cid, fc
[ "def", "row_to_content_obj", "(", "key_row", ")", ":", "key", ",", "row", "=", "key_row", "cid", "=", "mk_content_id", "(", "key", ".", "encode", "(", "'utf-8'", ")", ")", "response", "=", "row", ".", "get", "(", "'response'", ",", "{", "}", ")", "other_bows", "=", "defaultdict", "(", "StringCounter", ")", "for", "attr", ",", "val", "in", "row", ".", "get", "(", "'indices'", ",", "[", "]", ")", ":", "other_bows", "[", "attr", "]", "[", "val", "]", "+=", "1", "try", ":", "artifact_id", "=", "key", "if", "isinstance", "(", "artifact_id", ",", "str", ")", ":", "artifact_id", "=", "unicode", "(", "artifact_id", ",", "'utf-8'", ")", "fc", "=", "html_to_fc", "(", "response", ".", "get", "(", "'body'", ",", "''", ")", ",", "url", "=", "row", ".", "get", "(", "'url'", ")", ",", "timestamp", "=", "row", ".", "get", "(", "'timestamp'", ")", ",", "other_features", "=", "dict", "(", "other_bows", ",", "*", "*", "{", "'artifact_id'", ":", "artifact_id", "}", ")", ")", "except", ":", "fc", "=", "None", "print", "(", "'Could not create FC for %s:'", "%", "cid", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "traceback", ".", "format_exc", "(", ")", ",", "file", "=", "sys", ".", "stderr", ")", "return", "cid", ",", "fc" ]
Returns ``FeatureCollection`` given an HBase artifact row. Note that the FC returned has a Unicode feature ``artifact_id`` set to the row's key.
[ "Returns", "FeatureCollection", "given", "an", "HBase", "artifact", "row", "." ]
c9e282f690eab72963926329efe1600709e48b13
https://github.com/dossier/dossier.models/blob/c9e282f690eab72963926329efe1600709e48b13/dossier/models/etl/ads.py#L58-L83
244,071
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCtimes.py
_dict_to_tuple
def _dict_to_tuple(d): '''Convert a dictionary to a time tuple. Depends on key values in the regexp pattern! ''' # TODO: Adding a ms field to struct_time tuples is problematic # since they don't have this field. Should use datetime # which has a microseconds field, else no ms.. When mapping struct_time # to gDateTime the last 3 fields are irrelevant, here using dummy values to make # everything happy. # retval = _niltime[:] for k,i in ( ('Y', 0), ('M', 1), ('D', 2), ('h', 3), ('m', 4), ): v = d.get(k) if v: retval[i] = int(v) v = d.get('s') if v: msec,sec = _modf(float(v)) retval[6],retval[5] = int(round(msec*1000)), int(sec) v = d.get('tz') if v and v != 'Z': h,m = map(int, v.split(':')) # check for time zone offset, if within the same timezone, # ignore offset specific calculations offset=_localtimezone().utcoffset(_datetime.now()) local_offset_hour = offset.seconds/3600 local_offset_min = (offset.seconds%3600)%60 if local_offset_hour > 12: local_offset_hour -= 24 if local_offset_hour != h or local_offset_min != m: if h<0: #TODO: why is this set to server #foff = _fixedoffset(-((abs(h)*60+m)),"server") foff = _fixedoffset(-((abs(h)*60+m))) else: #TODO: why is this set to server #foff = _fixedoffset((abs(h)*60+m),"server") foff = _fixedoffset((abs(h)*60+m)) dt = _datetime(retval[0],retval[1],retval[2],retval[3],retval[4], retval[5],0,foff) # update dict with calculated timezone localdt=dt.astimezone(_localtimezone()) retval[0] = localdt.year retval[1] = localdt.month retval[2] = localdt.day retval[3] = localdt.hour retval[4] = localdt.minute retval[5] = localdt.second if d.get('neg', 0): retval[0:5] = map(operator.__neg__, retval[0:5]) return tuple(retval)
python
def _dict_to_tuple(d): '''Convert a dictionary to a time tuple. Depends on key values in the regexp pattern! ''' # TODO: Adding a ms field to struct_time tuples is problematic # since they don't have this field. Should use datetime # which has a microseconds field, else no ms.. When mapping struct_time # to gDateTime the last 3 fields are irrelevant, here using dummy values to make # everything happy. # retval = _niltime[:] for k,i in ( ('Y', 0), ('M', 1), ('D', 2), ('h', 3), ('m', 4), ): v = d.get(k) if v: retval[i] = int(v) v = d.get('s') if v: msec,sec = _modf(float(v)) retval[6],retval[5] = int(round(msec*1000)), int(sec) v = d.get('tz') if v and v != 'Z': h,m = map(int, v.split(':')) # check for time zone offset, if within the same timezone, # ignore offset specific calculations offset=_localtimezone().utcoffset(_datetime.now()) local_offset_hour = offset.seconds/3600 local_offset_min = (offset.seconds%3600)%60 if local_offset_hour > 12: local_offset_hour -= 24 if local_offset_hour != h or local_offset_min != m: if h<0: #TODO: why is this set to server #foff = _fixedoffset(-((abs(h)*60+m)),"server") foff = _fixedoffset(-((abs(h)*60+m))) else: #TODO: why is this set to server #foff = _fixedoffset((abs(h)*60+m),"server") foff = _fixedoffset((abs(h)*60+m)) dt = _datetime(retval[0],retval[1],retval[2],retval[3],retval[4], retval[5],0,foff) # update dict with calculated timezone localdt=dt.astimezone(_localtimezone()) retval[0] = localdt.year retval[1] = localdt.month retval[2] = localdt.day retval[3] = localdt.hour retval[4] = localdt.minute retval[5] = localdt.second if d.get('neg', 0): retval[0:5] = map(operator.__neg__, retval[0:5]) return tuple(retval)
[ "def", "_dict_to_tuple", "(", "d", ")", ":", "# TODO: Adding a ms field to struct_time tuples is problematic ", "# since they don't have this field. Should use datetime", "# which has a microseconds field, else no ms.. When mapping struct_time ", "# to gDateTime the last 3 fields are irrelevant, here using dummy values to make", "# everything happy.", "# ", "retval", "=", "_niltime", "[", ":", "]", "for", "k", ",", "i", "in", "(", "(", "'Y'", ",", "0", ")", ",", "(", "'M'", ",", "1", ")", ",", "(", "'D'", ",", "2", ")", ",", "(", "'h'", ",", "3", ")", ",", "(", "'m'", ",", "4", ")", ",", ")", ":", "v", "=", "d", ".", "get", "(", "k", ")", "if", "v", ":", "retval", "[", "i", "]", "=", "int", "(", "v", ")", "v", "=", "d", ".", "get", "(", "'s'", ")", "if", "v", ":", "msec", ",", "sec", "=", "_modf", "(", "float", "(", "v", ")", ")", "retval", "[", "6", "]", ",", "retval", "[", "5", "]", "=", "int", "(", "round", "(", "msec", "*", "1000", ")", ")", ",", "int", "(", "sec", ")", "v", "=", "d", ".", "get", "(", "'tz'", ")", "if", "v", "and", "v", "!=", "'Z'", ":", "h", ",", "m", "=", "map", "(", "int", ",", "v", ".", "split", "(", "':'", ")", ")", "# check for time zone offset, if within the same timezone, ", "# ignore offset specific calculations", "offset", "=", "_localtimezone", "(", ")", ".", "utcoffset", "(", "_datetime", ".", "now", "(", ")", ")", "local_offset_hour", "=", "offset", ".", "seconds", "/", "3600", "local_offset_min", "=", "(", "offset", ".", "seconds", "%", "3600", ")", "%", "60", "if", "local_offset_hour", ">", "12", ":", "local_offset_hour", "-=", "24", "if", "local_offset_hour", "!=", "h", "or", "local_offset_min", "!=", "m", ":", "if", "h", "<", "0", ":", "#TODO: why is this set to server", "#foff = _fixedoffset(-((abs(h)*60+m)),\"server\")", "foff", "=", "_fixedoffset", "(", "-", "(", "(", "abs", "(", "h", ")", "*", "60", "+", "m", ")", ")", ")", "else", ":", "#TODO: why is this set to server", "#foff = _fixedoffset((abs(h)*60+m),\"server\")", "foff", "=", "_fixedoffset", "(", "(", "abs", "(", "h", ")", "*", "60", "+", "m", ")", ")", "dt", "=", "_datetime", "(", "retval", "[", "0", "]", ",", "retval", "[", "1", "]", ",", "retval", "[", "2", "]", ",", "retval", "[", "3", "]", ",", "retval", "[", "4", "]", ",", "retval", "[", "5", "]", ",", "0", ",", "foff", ")", "# update dict with calculated timezone", "localdt", "=", "dt", ".", "astimezone", "(", "_localtimezone", "(", ")", ")", "retval", "[", "0", "]", "=", "localdt", ".", "year", "retval", "[", "1", "]", "=", "localdt", ".", "month", "retval", "[", "2", "]", "=", "localdt", ".", "day", "retval", "[", "3", "]", "=", "localdt", ".", "hour", "retval", "[", "4", "]", "=", "localdt", ".", "minute", "retval", "[", "5", "]", "=", "localdt", ".", "second", "if", "d", ".", "get", "(", "'neg'", ",", "0", ")", ":", "retval", "[", "0", ":", "5", "]", "=", "map", "(", "operator", ".", "__neg__", ",", "retval", "[", "0", ":", "5", "]", ")", "return", "tuple", "(", "retval", ")" ]
Convert a dictionary to a time tuple. Depends on key values in the regexp pattern!
[ "Convert", "a", "dictionary", "to", "a", "time", "tuple", ".", "Depends", "on", "key", "values", "in", "the", "regexp", "pattern!" ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCtimes.py#L79-L135
244,072
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCtimes.py
_localtimezone.dst
def dst(self, dt): """datetime -> DST offset in minutes east of UTC.""" tt = _localtime(_mktime((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1))) if tt.tm_isdst > 0: return _dstdiff return _zero
python
def dst(self, dt): """datetime -> DST offset in minutes east of UTC.""" tt = _localtime(_mktime((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1))) if tt.tm_isdst > 0: return _dstdiff return _zero
[ "def", "dst", "(", "self", ",", "dt", ")", ":", "tt", "=", "_localtime", "(", "_mktime", "(", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "dt", ".", "hour", ",", "dt", ".", "minute", ",", "dt", ".", "second", ",", "dt", ".", "weekday", "(", ")", ",", "0", ",", "-", "1", ")", ")", ")", "if", "tt", ".", "tm_isdst", ">", "0", ":", "return", "_dstdiff", "return", "_zero" ]
datetime -> DST offset in minutes east of UTC.
[ "datetime", "-", ">", "DST", "offset", "in", "minutes", "east", "of", "UTC", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCtimes.py#L30-L35
244,073
rameshg87/pyremotevbox
pyremotevbox/ZSI/TCtimes.py
_localtimezone.tzname
def tzname(self, dt): """datetime -> string name of time zone.""" tt = _localtime(_mktime((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1))) return _time.tzname[tt.tm_isdst > 0]
python
def tzname(self, dt): """datetime -> string name of time zone.""" tt = _localtime(_mktime((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, -1))) return _time.tzname[tt.tm_isdst > 0]
[ "def", "tzname", "(", "self", ",", "dt", ")", ":", "tt", "=", "_localtime", "(", "_mktime", "(", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "dt", ".", "hour", ",", "dt", ".", "minute", ",", "dt", ".", "second", ",", "dt", ".", "weekday", "(", ")", ",", "0", ",", "-", "1", ")", ")", ")", "return", "_time", ".", "tzname", "[", "tt", ".", "tm_isdst", ">", "0", "]" ]
datetime -> string name of time zone.
[ "datetime", "-", ">", "string", "name", "of", "time", "zone", "." ]
123dffff27da57c8faa3ac1dd4c68b1cf4558b1a
https://github.com/rameshg87/pyremotevbox/blob/123dffff27da57c8faa3ac1dd4c68b1cf4558b1a/pyremotevbox/ZSI/TCtimes.py#L40-L44
244,074
glue-viz/echo
echo/core.py
add_callback
def add_callback(instance, prop, callback, echo_old=False, priority=0): """ Attach a callback function to a property in an instance Parameters ---------- instance The instance to add the callback to prop : str Name of callback property in `instance` callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). Examples -------- :: class Foo: bar = CallbackProperty(0) def callback(value): pass f = Foo() add_callback(f, 'bar', callback) """ p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.add_callback(instance, callback, echo_old=echo_old, priority=priority)
python
def add_callback(instance, prop, callback, echo_old=False, priority=0): """ Attach a callback function to a property in an instance Parameters ---------- instance The instance to add the callback to prop : str Name of callback property in `instance` callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). Examples -------- :: class Foo: bar = CallbackProperty(0) def callback(value): pass f = Foo() add_callback(f, 'bar', callback) """ p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.add_callback(instance, callback, echo_old=echo_old, priority=priority)
[ "def", "add_callback", "(", "instance", ",", "prop", ",", "callback", ",", "echo_old", "=", "False", ",", "priority", "=", "0", ")", ":", "p", "=", "getattr", "(", "type", "(", "instance", ")", ",", "prop", ")", "if", "not", "isinstance", "(", "p", ",", "CallbackProperty", ")", ":", "raise", "TypeError", "(", "\"%s is not a CallbackProperty\"", "%", "prop", ")", "p", ".", "add_callback", "(", "instance", ",", "callback", ",", "echo_old", "=", "echo_old", ",", "priority", "=", "priority", ")" ]
Attach a callback function to a property in an instance Parameters ---------- instance The instance to add the callback to prop : str Name of callback property in `instance` callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). Examples -------- :: class Foo: bar = CallbackProperty(0) def callback(value): pass f = Foo() add_callback(f, 'bar', callback)
[ "Attach", "a", "callback", "function", "to", "a", "property", "in", "an", "instance" ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L334-L372
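The Examples block above only shows the single-argument form. A minimal sketch of the `echo_old=True` variant described in the parameters, assuming `CallbackProperty` and `add_callback` are importable from the top-level `echo` package and using made-up class and callback names::

    from echo import CallbackProperty, add_callback

    class Foo:
        bar = CallbackProperty(0)

    def on_change(old, new):
        # with echo_old=True the callback receives both values, as func(old, new)
        print("bar changed from", old, "to", new)

    f = Foo()
    add_callback(f, 'bar', on_change, echo_old=True)
    f.bar = 10   # prints: bar changed from 0 to 10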
244,075
glue-viz/echo
echo/core.py
remove_callback
def remove_callback(instance, prop, callback): """ Remove a callback function from a property in an instance Parameters ---------- instance The instance to detach the callback from prop : str Name of callback property in `instance` callback : func The callback function to remove """ p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.remove_callback(instance, callback)
python
def remove_callback(instance, prop, callback): """ Remove a callback function from a property in an instance Parameters ---------- instance The instance to detach the callback from prop : str Name of callback property in `instance` callback : func The callback function to remove """ p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.remove_callback(instance, callback)
[ "def", "remove_callback", "(", "instance", ",", "prop", ",", "callback", ")", ":", "p", "=", "getattr", "(", "type", "(", "instance", ")", ",", "prop", ")", "if", "not", "isinstance", "(", "p", ",", "CallbackProperty", ")", ":", "raise", "TypeError", "(", "\"%s is not a CallbackProperty\"", "%", "prop", ")", "p", ".", "remove_callback", "(", "instance", ",", "callback", ")" ]
Remove a callback function from a property in an instance Parameters ---------- instance The instance to detach the callback from prop : str Name of callback property in `instance` callback : func The callback function to remove
[ "Remove", "a", "callback", "function", "from", "a", "property", "in", "an", "instance" ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L375-L391
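A short usage sketch pairing `remove_callback` with `add_callback` from the previous record; the import path and names are illustrative::

    from echo import CallbackProperty, add_callback, remove_callback

    class Foo:
        bar = CallbackProperty(0)

    def on_change(new):
        print("bar is now", new)

    f = Foo()
    add_callback(f, 'bar', on_change)
    f.bar = 1                              # on_change fires
    remove_callback(f, 'bar', on_change)
    f.bar = 2                              # nothing fires: the callback was detached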
244,076
glue-viz/echo
echo/core.py
callback_property
def callback_property(getter): """ A decorator to build a CallbackProperty. This is used by wrapping a getter method, similar to the use of @property:: class Foo(object): @callback_property def x(self): return self._x @x.setter def x(self, value): self._x = value In simple cases with no getter or setter logic, it's easier to create a :class:`~echo.CallbackProperty` directly:: class Foo(object): x = CallbackProperty(initial_value) """ cb = CallbackProperty(getter=getter) cb.__doc__ = getter.__doc__ return cb
python
def callback_property(getter): """ A decorator to build a CallbackProperty. This is used by wrapping a getter method, similar to the use of @property:: class Foo(object): @callback_property def x(self): return self._x @x.setter def x(self, value): self._x = value In simple cases with no getter or setter logic, it's easier to create a :class:`~echo.CallbackProperty` directly:: class Foo(object): x = CallbackProperty(initial_value) """ cb = CallbackProperty(getter=getter) cb.__doc__ = getter.__doc__ return cb
[ "def", "callback_property", "(", "getter", ")", ":", "cb", "=", "CallbackProperty", "(", "getter", "=", "getter", ")", "cb", ".", "__doc__", "=", "getter", ".", "__doc__", "return", "cb" ]
A decorator to build a CallbackProperty. This is used by wrapping a getter method, similar to the use of @property:: class Foo(object): @callback_property def x(self): return self._x @x.setter def x(self, value): self._x = value In simple cases with no getter or setter logic, it's easier to create a :class:`~echo.CallbackProperty` directly:: class Foo(object): x = CallbackProperty(initial_value)
[ "A", "decorator", "to", "build", "a", "CallbackProperty", "." ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L394-L418
244,077
glue-viz/echo
echo/core.py
ignore_callback
def ignore_callback(instance, *props): """ Temporarily ignore any callbacks from one or more callback properties This is a context manager. Within the context block, no callbacks will be issued. In contrast with :func:`~echo.delay_callback`, no callbacks will be called on exiting the context manager Parameters ---------- instance An instance object with callback properties *props : str One or more properties within instance to ignore Examples -------- :: with ignore_callback(f, 'bar', 'baz'): f.bar = 20 f.baz = 30 f.bar = 10 print('done') # no callbacks called """ for prop in props: p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.disable(instance) if isinstance(instance, HasCallbackProperties): instance._ignore_global_callbacks(props) yield for prop in props: p = getattr(type(instance), prop) assert isinstance(p, CallbackProperty) p.enable(instance) if isinstance(instance, HasCallbackProperties): instance._unignore_global_callbacks(props)
python
def ignore_callback(instance, *props): """ Temporarily ignore any callbacks from one or more callback properties This is a context manager. Within the context block, no callbacks will be issued. In contrast with :func:`~echo.delay_callback`, no callbacks will be called on exiting the context manager Parameters ---------- instance An instance object with callback properties *props : str One or more properties within instance to ignore Examples -------- :: with ignore_callback(f, 'bar', 'baz'): f.bar = 20 f.baz = 30 f.bar = 10 print('done') # no callbacks called """ for prop in props: p = getattr(type(instance), prop) if not isinstance(p, CallbackProperty): raise TypeError("%s is not a CallbackProperty" % prop) p.disable(instance) if isinstance(instance, HasCallbackProperties): instance._ignore_global_callbacks(props) yield for prop in props: p = getattr(type(instance), prop) assert isinstance(p, CallbackProperty) p.enable(instance) if isinstance(instance, HasCallbackProperties): instance._unignore_global_callbacks(props)
[ "def", "ignore_callback", "(", "instance", ",", "*", "props", ")", ":", "for", "prop", "in", "props", ":", "p", "=", "getattr", "(", "type", "(", "instance", ")", ",", "prop", ")", "if", "not", "isinstance", "(", "p", ",", "CallbackProperty", ")", ":", "raise", "TypeError", "(", "\"%s is not a CallbackProperty\"", "%", "prop", ")", "p", ".", "disable", "(", "instance", ")", "if", "isinstance", "(", "instance", ",", "HasCallbackProperties", ")", ":", "instance", ".", "_ignore_global_callbacks", "(", "props", ")", "yield", "for", "prop", "in", "props", ":", "p", "=", "getattr", "(", "type", "(", "instance", ")", ",", "prop", ")", "assert", "isinstance", "(", "p", ",", "CallbackProperty", ")", "p", ".", "enable", "(", "instance", ")", "if", "isinstance", "(", "instance", ",", "HasCallbackProperties", ")", ":", "instance", ".", "_unignore_global_callbacks", "(", "props", ")" ]
Temporarily ignore any callbacks from one or more callback properties This is a context manager. Within the context block, no callbacks will be issued. In contrast with :func:`~echo.delay_callback`, no callbacks will be called on exiting the context manager Parameters ---------- instance An instance object with callback properties *props : str One or more properties within instance to ignore Examples -------- :: with ignore_callback(f, 'bar', 'baz'): f.bar = 20 f.baz = 30 f.bar = 10 print('done') # no callbacks called
[ "Temporarily", "ignore", "any", "callbacks", "from", "one", "or", "more", "callback", "properties" ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L511-L555
244,078
glue-viz/echo
echo/core.py
CallbackProperty.notify
def notify(self, instance, old, new): """ Call all callback functions with the current value Each callback will either be called using callback(new) or callback(old, new) depending on whether ``echo_old`` was set to `True` when calling :func:`~echo.add_callback` Parameters ---------- instance The instance to consider old The old value of the property new The new value of the property """ if self._disabled.get(instance, False): return for cback in self._callbacks.get(instance, []): cback(new) for cback in self._2arg_callbacks.get(instance, []): cback(old, new)
python
def notify(self, instance, old, new): """ Call all callback functions with the current value Each callback will either be called using callback(new) or callback(old, new) depending on whether ``echo_old`` was set to `True` when calling :func:`~echo.add_callback` Parameters ---------- instance The instance to consider old The old value of the property new The new value of the property """ if self._disabled.get(instance, False): return for cback in self._callbacks.get(instance, []): cback(new) for cback in self._2arg_callbacks.get(instance, []): cback(old, new)
[ "def", "notify", "(", "self", ",", "instance", ",", "old", ",", "new", ")", ":", "if", "self", ".", "_disabled", ".", "get", "(", "instance", ",", "False", ")", ":", "return", "for", "cback", "in", "self", ".", "_callbacks", ".", "get", "(", "instance", ",", "[", "]", ")", ":", "cback", "(", "new", ")", "for", "cback", "in", "self", ".", "_2arg_callbacks", ".", "get", "(", "instance", ",", "[", "]", ")", ":", "cback", "(", "old", ",", "new", ")" ]
Call all callback functions with the current value Each callback will either be called using callback(new) or callback(old, new) depending on whether ``echo_old`` was set to `True` when calling :func:`~echo.add_callback` Parameters ---------- instance The instance to consider old The old value of the property new The new value of the property
[ "Call", "all", "callback", "functions", "with", "the", "current", "value" ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L97-L120
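`notify` is normally triggered by assigning to the property rather than called directly; a sketch of how it dispatches to the one-argument and two-argument callback containers, assuming the same top-level `echo` imports as in the earlier sketches::

    from echo import CallbackProperty, add_callback

    class Foo:
        bar = CallbackProperty(0)

    f = Foo()
    add_callback(f, 'bar', lambda new: print("new:", new))
    add_callback(f, 'bar', lambda old, new: print("old/new:", old, new), echo_old=True)

    # Assigning the property ends up in notify(instance, old, new), which calls
    # the first callback as cb(new) and the second as cb(old, new).
    f.bar = 5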
244,079
glue-viz/echo
echo/core.py
CallbackProperty.add_callback
def add_callback(self, instance, func, echo_old=False, priority=0): """ Add a callback to a specific instance that manages this property Parameters ---------- instance The instance to add the callback to func : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). """ if echo_old: self._2arg_callbacks.setdefault(instance, CallbackContainer()).append(func, priority=priority) else: self._callbacks.setdefault(instance, CallbackContainer()).append(func, priority=priority)
python
def add_callback(self, instance, func, echo_old=False, priority=0): """ Add a callback to a specific instance that manages this property Parameters ---------- instance The instance to add the callback to func : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). """ if echo_old: self._2arg_callbacks.setdefault(instance, CallbackContainer()).append(func, priority=priority) else: self._callbacks.setdefault(instance, CallbackContainer()).append(func, priority=priority)
[ "def", "add_callback", "(", "self", ",", "instance", ",", "func", ",", "echo_old", "=", "False", ",", "priority", "=", "0", ")", ":", "if", "echo_old", ":", "self", ".", "_2arg_callbacks", ".", "setdefault", "(", "instance", ",", "CallbackContainer", "(", ")", ")", ".", "append", "(", "func", ",", "priority", "=", "priority", ")", "else", ":", "self", ".", "_callbacks", ".", "setdefault", "(", "instance", ",", "CallbackContainer", "(", ")", ")", ".", "append", "(", "func", ",", "priority", "=", "priority", ")" ]
Add a callback to a specific instance that manages this property Parameters ---------- instance The instance to add the callback to func : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``func(old, new)``. If `False` (the default), will be invoked as ``func(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority).
[ "Add", "a", "callback", "to", "a", "specific", "instance", "that", "manages", "this", "property" ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L134-L156
244,080
glue-viz/echo
echo/core.py
HasCallbackProperties.add_callback
def add_callback(self, name, callback, echo_old=False, priority=0): """ Add a callback that gets triggered when a callback property of the class changes. Parameters ---------- name : str The instance to add the callback to. callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``callback(old, new)``. If `False` (the default), will be invoked as ``callback(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). """ if self.is_callback_property(name): prop = getattr(type(self), name) prop.add_callback(self, callback, echo_old=echo_old, priority=priority) else: raise TypeError("attribute '{0}' is not a callback property".format(name))
python
def add_callback(self, name, callback, echo_old=False, priority=0): """ Add a callback that gets triggered when a callback property of the class changes. Parameters ---------- name : str The instance to add the callback to. callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``callback(old, new)``. If `False` (the default), will be invoked as ``callback(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority). """ if self.is_callback_property(name): prop = getattr(type(self), name) prop.add_callback(self, callback, echo_old=echo_old, priority=priority) else: raise TypeError("attribute '{0}' is not a callback property".format(name))
[ "def", "add_callback", "(", "self", ",", "name", ",", "callback", ",", "echo_old", "=", "False", ",", "priority", "=", "0", ")", ":", "if", "self", ".", "is_callback_property", "(", "name", ")", ":", "prop", "=", "getattr", "(", "type", "(", "self", ")", ",", "name", ")", "prop", ".", "add_callback", "(", "self", ",", "callback", ",", "echo_old", "=", "echo_old", ",", "priority", "=", "priority", ")", "else", ":", "raise", "TypeError", "(", "\"attribute '{0}' is not a callback property\"", ".", "format", "(", "name", ")", ")" ]
Add a callback that gets triggered when a callback property of the class changes. Parameters ---------- name : str The instance to add the callback to. callback : func The callback function to add echo_old : bool, optional If `True`, the callback function will be invoked with both the old and new values of the property, as ``callback(old, new)``. If `False` (the default), will be invoked as ``callback(new)`` priority : int, optional This can optionally be used to force a certain order of execution of callbacks (larger values indicate a higher priority).
[ "Add", "a", "callback", "that", "gets", "triggered", "when", "a", "callback", "property", "of", "the", "class", "changes", "." ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L245-L268
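A sketch of the instance-level `add_callback` on a `HasCallbackProperties` subclass; the class and property names are made up::

    from echo import CallbackProperty, HasCallbackProperties

    class State(HasCallbackProperties):
        color = CallbackProperty('red')

    s = State()
    s.add_callback('color', lambda new: print("color ->", new))
    s.color = 'blue'    # prints: color -> blue

    # A name that is not a CallbackProperty is rejected:
    # s.add_callback('size', print)   # would raise TypeError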
244,081
glue-viz/echo
echo/core.py
HasCallbackProperties.iter_callback_properties
def iter_callback_properties(self): """ Iterator to loop over all callback properties. """ for name in dir(self): if self.is_callback_property(name): yield name, getattr(type(self), name)
python
def iter_callback_properties(self): """ Iterator to loop over all callback properties. """ for name in dir(self): if self.is_callback_property(name): yield name, getattr(type(self), name)
[ "def", "iter_callback_properties", "(", "self", ")", ":", "for", "name", "in", "dir", "(", "self", ")", ":", "if", "self", ".", "is_callback_property", "(", "name", ")", ":", "yield", "name", ",", "getattr", "(", "type", "(", "self", ")", ",", "name", ")" ]
Iterator to loop over all callback properties.
[ "Iterator", "to", "loop", "over", "all", "callback", "properties", "." ]
6ad54cc5e869de27c34e8716f2619ddc640f08fe
https://github.com/glue-viz/echo/blob/6ad54cc5e869de27c34e8716f2619ddc640f08fe/echo/core.py#L325-L331
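Continuing the hypothetical `State` class from the previous sketch, iterating over its callback properties::

    from echo import CallbackProperty, HasCallbackProperties

    class State(HasCallbackProperties):
        x = CallbackProperty(1)
        y = CallbackProperty(2)

    s = State()
    for name, prop in s.iter_callback_properties():
        print(name, type(prop).__name__)   # x CallbackProperty, then y CallbackProperty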
244,082
cmutel/constructive_geometries
constructive_geometries/cg.py
has_gis
def has_gis(wrapped, instance, args, kwargs): """Skip function execution if GIS libraries are not installed""" if gis: return wrapped(*args, **kwargs) else: warn(MISSING_GIS)
python
def has_gis(wrapped, instance, args, kwargs): """Skip function execution if GIS libraries are not installed""" if gis: return wrapped(*args, **kwargs) else: warn(MISSING_GIS)
[ "def", "has_gis", "(", "wrapped", ",", "instance", ",", "args", ",", "kwargs", ")", ":", "if", "gis", ":", "return", "wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "warn", "(", "MISSING_GIS", ")" ]
Skip function execution if GIS libraries are not installed
[ "Skip", "function", "execution", "if", "GIS", "libraries", "are", "not", "installed" ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L22-L27
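The (wrapped, instance, args, kwargs) signature suggests `has_gis` is applied with `wrapt.decorator`; that is an assumption, since the decorator application and the module-level `gis` flag and `MISSING_GIS` message are not shown in this record. A stand-alone sketch of the same guard pattern::

    import wrapt
    from warnings import warn

    MISSING_GIS = "Functionality not available: GIS libraries could not be imported"
    gis = False   # stand-in for the flag the module sets from its optional imports

    @wrapt.decorator
    def has_gis(wrapped, instance, args, kwargs):
        # Only run the wrapped function when the GIS stack imported successfully
        if gis:
            return wrapped(*args, **kwargs)
        warn(MISSING_GIS)

    @has_gis
    def simplify_geometry(geom):
        return geom.simplify(0.05)

    simplify_geometry(None)   # warns instead of running, because gis is False here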
244,083
cmutel/constructive_geometries
constructive_geometries/cg.py
sha256
def sha256(filepath, blocksize=65536): """Generate SHA 256 hash for file at `filepath`""" hasher = hashlib.sha256() fo = open(filepath, 'rb') buf = fo.read(blocksize) while len(buf) > 0: hasher.update(buf) buf = fo.read(blocksize) return hasher.hexdigest()
python
def sha256(filepath, blocksize=65536): """Generate SHA 256 hash for file at `filepath`""" hasher = hashlib.sha256() fo = open(filepath, 'rb') buf = fo.read(blocksize) while len(buf) > 0: hasher.update(buf) buf = fo.read(blocksize) return hasher.hexdigest()
[ "def", "sha256", "(", "filepath", ",", "blocksize", "=", "65536", ")", ":", "hasher", "=", "hashlib", ".", "sha256", "(", ")", "fo", "=", "open", "(", "filepath", ",", "'rb'", ")", "buf", "=", "fo", ".", "read", "(", "blocksize", ")", "while", "len", "(", "buf", ")", ">", "0", ":", "hasher", ".", "update", "(", "buf", ")", "buf", "=", "fo", ".", "read", "(", "blocksize", ")", "return", "hasher", ".", "hexdigest", "(", ")" ]
Generate SHA 256 hash for file at `filepath`
[ "Generate", "SHA", "256", "hash", "for", "file", "at", "filepath" ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L33-L41
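A small usage sketch for the chunked hashing helper above; the file path is illustrative::

    digest = sha256("faces.gpkg")
    print(digest)    # 64-character hex digest

    # Cross-check against a one-shot hash of the same (small) file
    import hashlib
    assert digest == hashlib.sha256(open("faces.gpkg", "rb").read()).hexdigest()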
244,084
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.check_data
def check_data(self): """Check that definitions file is present, and that faces file is readable.""" assert os.path.exists(self.data_fp) if gis: with fiona.drivers(): with fiona.open(self.faces_fp) as src: assert src.meta gpkg_hash = json.load(open(self.data_fp))['metadata']['sha256'] assert gpkg_hash == sha256(self.faces_fp)
python
def check_data(self): """Check that definitions file is present, and that faces file is readable.""" assert os.path.exists(self.data_fp) if gis: with fiona.drivers(): with fiona.open(self.faces_fp) as src: assert src.meta gpkg_hash = json.load(open(self.data_fp))['metadata']['sha256'] assert gpkg_hash == sha256(self.faces_fp)
[ "def", "check_data", "(", "self", ")", ":", "assert", "os", ".", "path", ".", "exists", "(", "self", ".", "data_fp", ")", "if", "gis", ":", "with", "fiona", ".", "drivers", "(", ")", ":", "with", "fiona", ".", "open", "(", "self", ".", "faces_fp", ")", "as", "src", ":", "assert", "src", ".", "meta", "gpkg_hash", "=", "json", ".", "load", "(", "open", "(", "self", ".", "data_fp", ")", ")", "[", "'metadata'", "]", "[", "'sha256'", "]", "assert", "gpkg_hash", "==", "sha256", "(", "self", ".", "faces_fp", ")" ]
Check that definitions file is present, and that faces file is readable.
[ "Check", "that", "definitions", "file", "is", "present", "and", "that", "faces", "file", "is", "readable", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L73-L82
244,085
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.load_definitions
def load_definitions(self): """Load mapping of country names to face ids""" self.data = dict(json.load(open(self.data_fp))['data']) self.all_faces = set(self.data.pop("__all__")) self.locations = set(self.data.keys())
python
def load_definitions(self): """Load mapping of country names to face ids""" self.data = dict(json.load(open(self.data_fp))['data']) self.all_faces = set(self.data.pop("__all__")) self.locations = set(self.data.keys())
[ "def", "load_definitions", "(", "self", ")", ":", "self", ".", "data", "=", "dict", "(", "json", ".", "load", "(", "open", "(", "self", ".", "data_fp", ")", ")", "[", "'data'", "]", ")", "self", ".", "all_faces", "=", "set", "(", "self", ".", "data", ".", "pop", "(", "\"__all__\"", ")", ")", "self", ".", "locations", "=", "set", "(", "self", ".", "data", ".", "keys", "(", ")", ")" ]
Load mapping of country names to face ids
[ "Load", "mapping", "of", "country", "names", "to", "face", "ids" ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L84-L88
244,086
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.construct_rest_of_world
def construct_rest_of_world(self, excluded, name=None, fp=None, geom=True): """Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).""" for location in excluded: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( set().union(*[set(self.data[loc]) for loc in excluded]) ) if not geom: return included elif not gis: warn(MISSING_GIS) return geom = _union(included)[1] if fp: self.write_geoms_to_file(fp, [geom], [name] if name else None) return fp else: return geom
python
def construct_rest_of_world(self, excluded, name=None, fp=None, geom=True): """Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).""" for location in excluded: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( set().union(*[set(self.data[loc]) for loc in excluded]) ) if not geom: return included elif not gis: warn(MISSING_GIS) return geom = _union(included)[1] if fp: self.write_geoms_to_file(fp, [geom], [name] if name else None) return fp else: return geom
[ "def", "construct_rest_of_world", "(", "self", ",", "excluded", ",", "name", "=", "None", ",", "fp", "=", "None", ",", "geom", "=", "True", ")", ":", "for", "location", "in", "excluded", ":", "assert", "location", "in", "self", ".", "locations", ",", "\"Can't find location {}\"", ".", "format", "(", "location", ")", "included", "=", "self", ".", "all_faces", ".", "difference", "(", "set", "(", ")", ".", "union", "(", "*", "[", "set", "(", "self", ".", "data", "[", "loc", "]", ")", "for", "loc", "in", "excluded", "]", ")", ")", "if", "not", "geom", ":", "return", "included", "elif", "not", "gis", ":", "warn", "(", "MISSING_GIS", ")", "return", "geom", "=", "_union", "(", "included", ")", "[", "1", "]", "if", "fp", ":", "self", ".", "write_geoms_to_file", "(", "fp", ",", "[", "geom", "]", ",", "[", "name", "]", "if", "name", "else", "None", ")", "return", "fp", "else", ":", "return", "geom" ]
Construct rest-of-world geometry and optionally write to filepath ``fp``. Excludes faces in location list ``excluded``. ``excluded`` must be an iterable of location strings (not face ids).
[ "Construct", "rest", "-", "of", "-", "world", "geometry", "and", "optionally", "write", "to", "filepath", "fp", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L90-L111
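A usage sketch for `construct_rest_of_world`; the no-argument constructor and the location codes are assumptions for illustration only::

    from constructive_geometries import ConstructiveGeometries

    cg = ConstructiveGeometries()

    # Just the face ids of the world minus two locations (no GIS stack needed)
    faces = cg.construct_rest_of_world(["DE", "FR"], geom=False)

    # Unioned geometry written to a GeoPackage, when fiona/shapely are available
    cg.construct_rest_of_world(["DE", "FR"], name="RoW without DE and FR", fp="row.gpkg")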
244,087
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.construct_rest_of_worlds
def construct_rest_of_worlds(self, excluded, fp=None, use_mp=True, simplify=True): """Construct many rest-of-world geometries and optionally write to filepath ``fp``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``.""" geoms = {} raw_data = [] for key in sorted(excluded): locations = excluded[key] for location in locations: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( {face for loc in locations for face in self.data[loc]} ) raw_data.append((key, self.faces_fp, included)) if use_mp: with Pool(cpu_count() - 1) as pool: results = pool.map(_union, raw_data) geoms = dict(results) else: geoms = dict([_union(row) for row in raw_data]) if simplify: geoms = {k: v.simplify(0.05) for k, v in geoms.items()} if fp: labels = sorted(geoms) self.write_geoms_to_file(fp, [geoms[key] for key in labels], labels) return fp else: return geoms
python
def construct_rest_of_worlds(self, excluded, fp=None, use_mp=True, simplify=True): """Construct many rest-of-world geometries and optionally write to filepath ``fp``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``.""" geoms = {} raw_data = [] for key in sorted(excluded): locations = excluded[key] for location in locations: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( {face for loc in locations for face in self.data[loc]} ) raw_data.append((key, self.faces_fp, included)) if use_mp: with Pool(cpu_count() - 1) as pool: results = pool.map(_union, raw_data) geoms = dict(results) else: geoms = dict([_union(row) for row in raw_data]) if simplify: geoms = {k: v.simplify(0.05) for k, v in geoms.items()} if fp: labels = sorted(geoms) self.write_geoms_to_file(fp, [geoms[key] for key in labels], labels) return fp else: return geoms
[ "def", "construct_rest_of_worlds", "(", "self", ",", "excluded", ",", "fp", "=", "None", ",", "use_mp", "=", "True", ",", "simplify", "=", "True", ")", ":", "geoms", "=", "{", "}", "raw_data", "=", "[", "]", "for", "key", "in", "sorted", "(", "excluded", ")", ":", "locations", "=", "excluded", "[", "key", "]", "for", "location", "in", "locations", ":", "assert", "location", "in", "self", ".", "locations", ",", "\"Can't find location {}\"", ".", "format", "(", "location", ")", "included", "=", "self", ".", "all_faces", ".", "difference", "(", "{", "face", "for", "loc", "in", "locations", "for", "face", "in", "self", ".", "data", "[", "loc", "]", "}", ")", "raw_data", ".", "append", "(", "(", "key", ",", "self", ".", "faces_fp", ",", "included", ")", ")", "if", "use_mp", ":", "with", "Pool", "(", "cpu_count", "(", ")", "-", "1", ")", "as", "pool", ":", "results", "=", "pool", ".", "map", "(", "_union", ",", "raw_data", ")", "geoms", "=", "dict", "(", "results", ")", "else", ":", "geoms", "=", "dict", "(", "[", "_union", "(", "row", ")", "for", "row", "in", "raw_data", "]", ")", "if", "simplify", ":", "geoms", "=", "{", "k", ":", "v", ".", "simplify", "(", "0.05", ")", "for", "k", ",", "v", "in", "geoms", ".", "items", "(", ")", "}", "if", "fp", ":", "labels", "=", "sorted", "(", "geoms", ")", "self", ".", "write_geoms_to_file", "(", "fp", ",", "[", "geoms", "[", "key", "]", "for", "key", "in", "labels", "]", ",", "labels", ")", "return", "fp", "else", ":", "return", "geoms" ]
Construct many rest-of-world geometries and optionally write to filepath ``fp``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``.
[ "Construct", "many", "rest", "-", "of", "-", "world", "geometries", "and", "optionally", "write", "to", "filepath", "fp", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L114-L141
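The plural variant takes a dictionary mapping labels to excluded locations; a sketch reusing the hypothetical `cg` instance and location codes from the previous example::

    excluded = {
        "RoW-steel": ["CN", "DE"],
        "RoW-aluminium": ["CA", "NO"],
    }

    # Dict of label -> simplified shapely geometry (single-process for clarity)
    geoms = cg.construct_rest_of_worlds(excluded, use_mp=False)

    # Or write all of them into one GeoPackage
    cg.construct_rest_of_worlds(excluded, fp="rows.gpkg")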
244,088
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.construct_rest_of_worlds_mapping
def construct_rest_of_worlds_mapping(self, excluded, fp=None): """Construct topo mapping file for ``excluded``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``. Topo mapping has the data format: .. code-block:: python { 'data': [ ['location label', ['topo face integer ids']], ], 'metadata': { 'filename': 'name of face definitions file', 'field': 'field with uniquely identifies the fields in ``filename``', 'sha256': 'SHA 256 hash of ``filename``' } } """ metadata = { 'filename': 'faces.gpkg', 'field': 'id', 'sha256': sha256(self.faces_fp) } data = [] for key, locations in excluded.items(): for location in locations: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( {face for loc in locations for face in self.data[loc]} ) data.append((key, sorted(included))) obj = {'data': data, 'metadata': metadata} if fp: with open(fp, "w") as f: json.dump(obj, f, indent=2) else: return obj
python
def construct_rest_of_worlds_mapping(self, excluded, fp=None): """Construct topo mapping file for ``excluded``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``. Topo mapping has the data format: .. code-block:: python { 'data': [ ['location label', ['topo face integer ids']], ], 'metadata': { 'filename': 'name of face definitions file', 'field': 'field with uniquely identifies the fields in ``filename``', 'sha256': 'SHA 256 hash of ``filename``' } } """ metadata = { 'filename': 'faces.gpkg', 'field': 'id', 'sha256': sha256(self.faces_fp) } data = [] for key, locations in excluded.items(): for location in locations: assert location in self.locations, "Can't find location {}".format(location) included = self.all_faces.difference( {face for loc in locations for face in self.data[loc]} ) data.append((key, sorted(included))) obj = {'data': data, 'metadata': metadata} if fp: with open(fp, "w") as f: json.dump(obj, f, indent=2) else: return obj
[ "def", "construct_rest_of_worlds_mapping", "(", "self", ",", "excluded", ",", "fp", "=", "None", ")", ":", "metadata", "=", "{", "'filename'", ":", "'faces.gpkg'", ",", "'field'", ":", "'id'", ",", "'sha256'", ":", "sha256", "(", "self", ".", "faces_fp", ")", "}", "data", "=", "[", "]", "for", "key", ",", "locations", "in", "excluded", ".", "items", "(", ")", ":", "for", "location", "in", "locations", ":", "assert", "location", "in", "self", ".", "locations", ",", "\"Can't find location {}\"", ".", "format", "(", "location", ")", "included", "=", "self", ".", "all_faces", ".", "difference", "(", "{", "face", "for", "loc", "in", "locations", "for", "face", "in", "self", ".", "data", "[", "loc", "]", "}", ")", "data", ".", "append", "(", "(", "key", ",", "sorted", "(", "included", ")", ")", ")", "obj", "=", "{", "'data'", ":", "data", ",", "'metadata'", ":", "metadata", "}", "if", "fp", ":", "with", "open", "(", "fp", ",", "\"w\"", ")", "as", "f", ":", "json", ".", "dump", "(", "obj", ",", "f", ",", "indent", "=", "2", ")", "else", ":", "return", "obj" ]
Construct topo mapping file for ``excluded``. ``excluded`` must be a **dictionary** of {"rest-of-world label": ["names", "of", "excluded", "locations"]}``. Topo mapping has the data format: .. code-block:: python { 'data': [ ['location label', ['topo face integer ids']], ], 'metadata': { 'filename': 'name of face definitions file', 'field': 'field with uniquely identifies the fields in ``filename``', 'sha256': 'SHA 256 hash of ``filename``' } }
[ "Construct", "topo", "mapping", "file", "for", "excluded", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L143-L182
244,089
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.construct_difference
def construct_difference(self, parent, excluded, name=None, fp=None): """Construct geometry from ``parent`` without the regions in ``excluded`` and optionally write to filepath ``fp``. ``excluded`` must be an iterable of location strings (not face ids).""" assert parent in self.locations, "Can't find location {}".format(parent) for location in excluded: assert location in self.locations, "Can't find location {}".format(location) included = set(self.data[parent]).difference( reduce(set.union, [set(self.data[loc]) for loc in excluded]) ) geom = _union(included) if fp: self.write_geoms_to_file(fp, [geom], [name] if name else None) return fp else: return geom
python
def construct_difference(self, parent, excluded, name=None, fp=None): """Construct geometry from ``parent`` without the regions in ``excluded`` and optionally write to filepath ``fp``. ``excluded`` must be an iterable of location strings (not face ids).""" assert parent in self.locations, "Can't find location {}".format(parent) for location in excluded: assert location in self.locations, "Can't find location {}".format(location) included = set(self.data[parent]).difference( reduce(set.union, [set(self.data[loc]) for loc in excluded]) ) geom = _union(included) if fp: self.write_geoms_to_file(fp, [geom], [name] if name else None) return fp else: return geom
[ "def", "construct_difference", "(", "self", ",", "parent", ",", "excluded", ",", "name", "=", "None", ",", "fp", "=", "None", ")", ":", "assert", "parent", "in", "self", ".", "locations", ",", "\"Can't find location {}\"", ".", "format", "(", "parent", ")", "for", "location", "in", "excluded", ":", "assert", "location", "in", "self", ".", "locations", ",", "\"Can't find location {}\"", ".", "format", "(", "location", ")", "included", "=", "set", "(", "self", ".", "data", "[", "parent", "]", ")", ".", "difference", "(", "reduce", "(", "set", ".", "union", ",", "[", "set", "(", "self", ".", "data", "[", "loc", "]", ")", "for", "loc", "in", "excluded", "]", ")", ")", "geom", "=", "_union", "(", "included", ")", "if", "fp", ":", "self", ".", "write_geoms_to_file", "(", "fp", ",", "[", "geom", "]", ",", "[", "name", "]", "if", "name", "else", "None", ")", "return", "fp", "else", ":", "return", "geom" ]
Construct geometry from ``parent`` without the regions in ``excluded`` and optionally write to filepath ``fp``. ``excluded`` must be an iterable of location strings (not face ids).
[ "Construct", "geometry", "from", "parent", "without", "the", "regions", "in", "excluded", "and", "optionally", "write", "to", "filepath", "fp", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L185-L200
244,090
cmutel/constructive_geometries
constructive_geometries/cg.py
ConstructiveGeometries.write_geoms_to_file
def write_geoms_to_file(self, fp, geoms, names=None): """Write unioned geometries ``geoms`` to filepath ``fp``. Optionally use ``names`` in name field.""" if fp[-5:] != '.gpkg': fp = fp + '.gpkg' if names is not None: assert len(geoms) == len(names), "Inconsistent length of geometries and names" else: names = ("Merged geometry {}".format(count) for count in itertools.count()) meta = { 'crs': {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'}, 'driver': 'GPKG', 'schema': {'geometry': 'MultiPolygon', 'properties': {'name': 'str', 'id': 'int'}} } with fiona.drivers(): with fiona.open(fp, 'w', **meta) as sink: for geom, name, count in zip(geoms, names, itertools.count(1)): sink.write({ 'geometry': _to_fiona(geom), 'properties': {'name': name, 'id': count} }) return fp
python
def write_geoms_to_file(self, fp, geoms, names=None): """Write unioned geometries ``geoms`` to filepath ``fp``. Optionally use ``names`` in name field.""" if fp[-5:] != '.gpkg': fp = fp + '.gpkg' if names is not None: assert len(geoms) == len(names), "Inconsistent length of geometries and names" else: names = ("Merged geometry {}".format(count) for count in itertools.count()) meta = { 'crs': {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'}, 'driver': 'GPKG', 'schema': {'geometry': 'MultiPolygon', 'properties': {'name': 'str', 'id': 'int'}} } with fiona.drivers(): with fiona.open(fp, 'w', **meta) as sink: for geom, name, count in zip(geoms, names, itertools.count(1)): sink.write({ 'geometry': _to_fiona(geom), 'properties': {'name': name, 'id': count} }) return fp
[ "def", "write_geoms_to_file", "(", "self", ",", "fp", ",", "geoms", ",", "names", "=", "None", ")", ":", "if", "fp", "[", "-", "5", ":", "]", "!=", "'.gpkg'", ":", "fp", "=", "fp", "+", "'.gpkg'", "if", "names", "is", "not", "None", ":", "assert", "len", "(", "geoms", ")", "==", "len", "(", "names", ")", ",", "\"Inconsistent length of geometries and names\"", "else", ":", "names", "=", "(", "\"Merged geometry {}\"", ".", "format", "(", "count", ")", "for", "count", "in", "itertools", ".", "count", "(", ")", ")", "meta", "=", "{", "'crs'", ":", "{", "'no_defs'", ":", "True", ",", "'ellps'", ":", "'WGS84'", ",", "'datum'", ":", "'WGS84'", ",", "'proj'", ":", "'longlat'", "}", ",", "'driver'", ":", "'GPKG'", ",", "'schema'", ":", "{", "'geometry'", ":", "'MultiPolygon'", ",", "'properties'", ":", "{", "'name'", ":", "'str'", ",", "'id'", ":", "'int'", "}", "}", "}", "with", "fiona", ".", "drivers", "(", ")", ":", "with", "fiona", ".", "open", "(", "fp", ",", "'w'", ",", "*", "*", "meta", ")", "as", "sink", ":", "for", "geom", ",", "name", ",", "count", "in", "zip", "(", "geoms", ",", "names", ",", "itertools", ".", "count", "(", "1", ")", ")", ":", "sink", ".", "write", "(", "{", "'geometry'", ":", "_to_fiona", "(", "geom", ")", ",", "'properties'", ":", "{", "'name'", ":", "name", ",", "'id'", ":", "count", "}", "}", ")", "return", "fp" ]
Write unioned geometries ``geoms`` to filepath ``fp``. Optionally use ``names`` in name field.
[ "Write", "unioned", "geometries", "geoms", "to", "filepath", "fp", ".", "Optionally", "use", "names", "in", "name", "field", "." ]
d38d7e8d5bf943a6499f3000004f1953af5970de
https://github.com/cmutel/constructive_geometries/blob/d38d7e8d5bf943a6499f3000004f1953af5970de/constructive_geometries/cg.py#L203-L223
244,091
edeposit/edeposit.amqp.ftp
src/edeposit/amqp/ftp/decoders/parser_csv.py
decode
def decode(data): """ Handles decoding of the CSV `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data. """ # try to guess dialect of the csv file dialect = None try: dialect = csv.Sniffer().sniff(data) except Exception: pass # parse data with csv parser handler = None try: data = data.splitlines() # used later handler = csv.reader(data, dialect) except Exception, e: raise MetaParsingException("Can't parse your CSV data: %s" % e.message) # make sure, that data are meaningful decoded = [] for cnt, line in enumerate(handler): usable_data = filter(lambda x: x.strip(), line) if not usable_data: continue if len(usable_data) != 2: raise MetaParsingException( "Bad number of elements - line %d:\n\t%s\n" % (cnt, data[cnt]) ) # remove trailing spaces, decode to utf-8 usable_data = map(lambda x: x.strip().decode("utf-8"), usable_data) # remove quotes if the csv.Sniffer failed to decode right `dialect` usable_data = map(lambda x: _remove_quotes(x), usable_data) decoded.append(usable_data) # apply another checks to data decoded = validator.check_structure(decoded) return decoded
python
def decode(data): """ Handles decoding of the CSV `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data. """ # try to guess dialect of the csv file dialect = None try: dialect = csv.Sniffer().sniff(data) except Exception: pass # parse data with csv parser handler = None try: data = data.splitlines() # used later handler = csv.reader(data, dialect) except Exception, e: raise MetaParsingException("Can't parse your CSV data: %s" % e.message) # make sure, that data are meaningful decoded = [] for cnt, line in enumerate(handler): usable_data = filter(lambda x: x.strip(), line) if not usable_data: continue if len(usable_data) != 2: raise MetaParsingException( "Bad number of elements - line %d:\n\t%s\n" % (cnt, data[cnt]) ) # remove trailing spaces, decode to utf-8 usable_data = map(lambda x: x.strip().decode("utf-8"), usable_data) # remove quotes if the csv.Sniffer failed to decode right `dialect` usable_data = map(lambda x: _remove_quotes(x), usable_data) decoded.append(usable_data) # apply another checks to data decoded = validator.check_structure(decoded) return decoded
[ "def", "decode", "(", "data", ")", ":", "# try to guess dialect of the csv file", "dialect", "=", "None", "try", ":", "dialect", "=", "csv", ".", "Sniffer", "(", ")", ".", "sniff", "(", "data", ")", "except", "Exception", ":", "pass", "# parse data with csv parser", "handler", "=", "None", "try", ":", "data", "=", "data", ".", "splitlines", "(", ")", "# used later", "handler", "=", "csv", ".", "reader", "(", "data", ",", "dialect", ")", "except", "Exception", ",", "e", ":", "raise", "MetaParsingException", "(", "\"Can't parse your CSV data: %s\"", "%", "e", ".", "message", ")", "# make sure, that data are meaningful", "decoded", "=", "[", "]", "for", "cnt", ",", "line", "in", "enumerate", "(", "handler", ")", ":", "usable_data", "=", "filter", "(", "lambda", "x", ":", "x", ".", "strip", "(", ")", ",", "line", ")", "if", "not", "usable_data", ":", "continue", "if", "len", "(", "usable_data", ")", "!=", "2", ":", "raise", "MetaParsingException", "(", "\"Bad number of elements - line %d:\\n\\t%s\\n\"", "%", "(", "cnt", ",", "data", "[", "cnt", "]", ")", ")", "# remove trailing spaces, decode to utf-8", "usable_data", "=", "map", "(", "lambda", "x", ":", "x", ".", "strip", "(", ")", ".", "decode", "(", "\"utf-8\"", ")", ",", "usable_data", ")", "# remove quotes if the csv.Sniffer failed to decode right `dialect`", "usable_data", "=", "map", "(", "lambda", "x", ":", "_remove_quotes", "(", "x", ")", ",", "usable_data", ")", "decoded", ".", "append", "(", "usable_data", ")", "# apply another checks to data", "decoded", "=", "validator", ".", "check_structure", "(", "decoded", ")", "return", "decoded" ]
Handles decoding of the CSV `data`. Args: data (str): Data which will be decoded. Returns: dict: Dictionary with decoded data.
[ "Handles", "decoding", "of", "the", "CSV", "data", "." ]
fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71
https://github.com/edeposit/edeposit.amqp.ftp/blob/fcdcbffb6e5d194e1bb4f85f0b8eaa9dbb08aa71/src/edeposit/amqp/ftp/decoders/parser_csv.py#L45-L94
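The decoder above is written for Python 2 (the `except Exception, e` syntax, byte-string `.decode`, and list-returning `filter`/`map`). A hedged Python 3 re-statement of the same parsing loop, with the project-specific `MetaParsingException`, `_remove_quotes`, and `validator.check_structure` steps left out and the sample CSV content invented::

    import csv

    def decode_py3(data):
        """Python 3 sketch of the CSV parsing loop above (validation steps omitted)."""
        try:
            dialect = csv.Sniffer().sniff(data)
        except csv.Error:
            dialect = None

        lines = data.splitlines()
        reader = csv.reader(lines, dialect) if dialect else csv.reader(lines)

        decoded = []
        for cnt, line in enumerate(reader):
            usable = [cell.strip() for cell in line if cell.strip()]
            if not usable:
                continue
            if len(usable) != 2:
                raise ValueError("Bad number of elements - line %d:\n\t%s\n" % (cnt, lines[cnt]))
            decoded.append(usable)
        return decoded

    print(decode_py3("ISBN, 978-80-00-00000-0\nAuthor, Jan Novak\n"))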
244,092
laco/python-scotty
scotty/__init__.py
cli
def cli(ctx, config, quiet): """AWS ECS Docker Deployment Tool""" ctx.obj = {} ctx.obj['config'] = load_config(config.read()) # yaml.load(config.read()) ctx.obj['quiet'] = quiet log(ctx, ' * ' + rnd_scotty_quote() + ' * ')
python
def cli(ctx, config, quiet): """AWS ECS Docker Deployment Tool""" ctx.obj = {} ctx.obj['config'] = load_config(config.read()) # yaml.load(config.read()) ctx.obj['quiet'] = quiet log(ctx, ' * ' + rnd_scotty_quote() + ' * ')
[ "def", "cli", "(", "ctx", ",", "config", ",", "quiet", ")", ":", "ctx", ".", "obj", "=", "{", "}", "ctx", ".", "obj", "[", "'config'", "]", "=", "load_config", "(", "config", ".", "read", "(", ")", ")", "# yaml.load(config.read())", "ctx", ".", "obj", "[", "'quiet'", "]", "=", "quiet", "log", "(", "ctx", ",", "' * '", "+", "rnd_scotty_quote", "(", ")", "+", "' * '", ")" ]
AWS ECS Docker Deployment Tool
[ "AWS", "ECS", "Docker", "Deployment", "Tool" ]
b8d1925db881adaf06ce3c532ab3a61835dce6a8
https://github.com/laco/python-scotty/blob/b8d1925db881adaf06ce3c532ab3a61835dce6a8/scotty/__init__.py#L12-L17
244,093
yunojuno-archive/python-errordite
errordite/handlers.py
ErrorditeHandler.emit
def emit(self, record): """ Sends exception info to Errordite. This handler will ignore the log level, and look for an exception within the record (as record.exc_info) or current stack frame (sys.exc_info()). If it finds neither, it will simply return without doing anything. """ if not self.token: raise Exception("Missing Errordite service token.") if record.levelname == 'EXCEPTION': exc_info = record.exc_info else: exc_info = sys.exc_info() if exc_info == (None, None, None): # we can't find an exception to report on, so just return return ex_type, ex_value, ex_tb = exc_info ex_source = traceback.extract_tb(ex_tb)[-1] payload = { "TimestampUtc": datetime.datetime.utcnow().isoformat(), "Token": self.token, "MachineName": platform.node(), "ExceptionInfo": { "Message": record.msg % record.args, "Source": '%s: line %s' % (ex_source[0], ex_source[1]), "ExceptionType": '%s.%s' % (ex_type.__module__, ex_type.__name__), "StackTrace": traceback.format_exc(), "MethodName": ex_source[2] } } if hasattr(record, 'version'): payload['Version'] = record.version # enrich with additional, non-core information. This may be sub- # classed payload = self.enrich_errordite_payload(payload, record) try: requests.post( ERRORDITE_API_URL, data=json.dumps(payload), headers={'content-type': 'application/json'} ) # since we already in the logger, logging an error, there's # there's really nothing we can do with the response that adds # any value - so ignore it. return 'ok' except: self.handleError(record)
python
def emit(self, record): """ Sends exception info to Errordite. This handler will ignore the log level, and look for an exception within the record (as record.exc_info) or current stack frame (sys.exc_info()). If it finds neither, it will simply return without doing anything. """ if not self.token: raise Exception("Missing Errordite service token.") if record.levelname == 'EXCEPTION': exc_info = record.exc_info else: exc_info = sys.exc_info() if exc_info == (None, None, None): # we can't find an exception to report on, so just return return ex_type, ex_value, ex_tb = exc_info ex_source = traceback.extract_tb(ex_tb)[-1] payload = { "TimestampUtc": datetime.datetime.utcnow().isoformat(), "Token": self.token, "MachineName": platform.node(), "ExceptionInfo": { "Message": record.msg % record.args, "Source": '%s: line %s' % (ex_source[0], ex_source[1]), "ExceptionType": '%s.%s' % (ex_type.__module__, ex_type.__name__), "StackTrace": traceback.format_exc(), "MethodName": ex_source[2] } } if hasattr(record, 'version'): payload['Version'] = record.version # enrich with additional, non-core information. This may be sub- # classed payload = self.enrich_errordite_payload(payload, record) try: requests.post( ERRORDITE_API_URL, data=json.dumps(payload), headers={'content-type': 'application/json'} ) # since we already in the logger, logging an error, there's # there's really nothing we can do with the response that adds # any value - so ignore it. return 'ok' except: self.handleError(record)
[ "def", "emit", "(", "self", ",", "record", ")", ":", "if", "not", "self", ".", "token", ":", "raise", "Exception", "(", "\"Missing Errordite service token.\"", ")", "if", "record", ".", "levelname", "==", "'EXCEPTION'", ":", "exc_info", "=", "record", ".", "exc_info", "else", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "if", "exc_info", "==", "(", "None", ",", "None", ",", "None", ")", ":", "# we can't find an exception to report on, so just return", "return", "ex_type", ",", "ex_value", ",", "ex_tb", "=", "exc_info", "ex_source", "=", "traceback", ".", "extract_tb", "(", "ex_tb", ")", "[", "-", "1", "]", "payload", "=", "{", "\"TimestampUtc\"", ":", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "isoformat", "(", ")", ",", "\"Token\"", ":", "self", ".", "token", ",", "\"MachineName\"", ":", "platform", ".", "node", "(", ")", ",", "\"ExceptionInfo\"", ":", "{", "\"Message\"", ":", "record", ".", "msg", "%", "record", ".", "args", ",", "\"Source\"", ":", "'%s: line %s'", "%", "(", "ex_source", "[", "0", "]", ",", "ex_source", "[", "1", "]", ")", ",", "\"ExceptionType\"", ":", "'%s.%s'", "%", "(", "ex_type", ".", "__module__", ",", "ex_type", ".", "__name__", ")", ",", "\"StackTrace\"", ":", "traceback", ".", "format_exc", "(", ")", ",", "\"MethodName\"", ":", "ex_source", "[", "2", "]", "}", "}", "if", "hasattr", "(", "record", ",", "'version'", ")", ":", "payload", "[", "'Version'", "]", "=", "record", ".", "version", "# enrich with additional, non-core information. This may be sub-", "# classed", "payload", "=", "self", ".", "enrich_errordite_payload", "(", "payload", ",", "record", ")", "try", ":", "requests", ".", "post", "(", "ERRORDITE_API_URL", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "{", "'content-type'", ":", "'application/json'", "}", ")", "# since we already in the logger, logging an error, there's", "# there's really nothing we can do with the response that adds", "# any value - so ignore it.", "return", "'ok'", "except", ":", "self", ".", "handleError", "(", "record", ")" ]
Sends exception info to Errordite. This handler will ignore the log level, and look for an exception within the record (as record.exc_info) or current stack frame (sys.exc_info()). If it finds neither, it will simply return without doing anything.
[ "Sends", "exception", "info", "to", "Errordite", "." ]
320585f6e29043b3fea11304e0f1dde3ea3a19da
https://github.com/yunojuno-archive/python-errordite/blob/320585f6e29043b3fea11304e0f1dde3ea3a19da/errordite/handlers.py#L49-L104
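A sketch of wiring the handler into stdlib logging. The handler's constructor is not shown in this record, so passing the Errordite token as its only argument is an assumption based on the `self.token` attribute used in `emit`::

    import logging
    from errordite.handlers import ErrorditeHandler

    logger = logging.getLogger("myapp")
    logger.addHandler(ErrorditeHandler("your-errordite-token"))   # hypothetical signature

    try:
        1 / 0
    except ZeroDivisionError:
        # emit() picks the exception up from sys.exc_info() and posts it to the API
        logger.error("division failed for user %s", "alice")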
244,094
callowayproject/Transmogrify
transmogrify/contrib/django/templatetags/transmogrifiers.py
resolve
def resolve(var, context): """ Resolve the variable, or return the value passed to it in the first place """ try: return var.resolve(context) except template.VariableDoesNotExist: return var.var
python
def resolve(var, context): """ Resolve the variable, or return the value passed to it in the first place """ try: return var.resolve(context) except template.VariableDoesNotExist: return var.var
[ "def", "resolve", "(", "var", ",", "context", ")", ":", "try", ":", "return", "var", ".", "resolve", "(", "context", ")", "except", "template", ".", "VariableDoesNotExist", ":", "return", "var", ".", "var" ]
Resolve the variable, or return the value passed to it in the first place
[ "Resolve", "the", "variable", "or", "return", "the", "value", "passed", "to", "it", "in", "the", "first", "place" ]
f1f891b8b923b3a1ede5eac7f60531c1c472379e
https://github.com/callowayproject/Transmogrify/blob/f1f891b8b923b3a1ede5eac7f60531c1c472379e/transmogrify/contrib/django/templatetags/transmogrifiers.py#L22-L29
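A short behavioural sketch of `resolve`, assuming a configured Django environment and importing the helper from the template tag module at this record's path::

    from django.template import Context, Variable
    from transmogrify.contrib.django.templatetags.transmogrifiers import resolve

    ctx = Context({"user_name": "alice"})
    print(resolve(Variable("user_name"), ctx))   # -> "alice"
    print(resolve(Variable("missing"), ctx))     # -> "missing" (falls back to the raw token)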
244,095
etcher-be/emiz
emiz/avwx/__init__.py
Metar.update
def update(self, report: str = None) -> bool: """Updates raw, data, and translations by fetching and parsing the METAR report Returns True if a new report is available, else False """ if report is not None: self.raw = report else: raw = self.service.fetch(self.station) if raw == self.raw: return False self.raw = raw self.data, self.units = metar.parse(self.station, self.raw) self.translations = translate.metar(self.data, self.units) # type: ignore self.last_updated = datetime.utcnow() return True
python
def update(self, report: str = None) -> bool: """Updates raw, data, and translations by fetching and parsing the METAR report Returns True if a new report is available, else False """ if report is not None: self.raw = report else: raw = self.service.fetch(self.station) if raw == self.raw: return False self.raw = raw self.data, self.units = metar.parse(self.station, self.raw) self.translations = translate.metar(self.data, self.units) # type: ignore self.last_updated = datetime.utcnow() return True
[ "def", "update", "(", "self", ",", "report", ":", "str", "=", "None", ")", "->", "bool", ":", "if", "report", "is", "not", "None", ":", "self", ".", "raw", "=", "report", "else", ":", "raw", "=", "self", ".", "service", ".", "fetch", "(", "self", ".", "station", ")", "if", "raw", "==", "self", ".", "raw", ":", "return", "False", "self", ".", "raw", "=", "raw", "self", ".", "data", ",", "self", ".", "units", "=", "metar", ".", "parse", "(", "self", ".", "station", ",", "self", ".", "raw", ")", "self", ".", "translations", "=", "translate", ".", "metar", "(", "self", ".", "data", ",", "self", ".", "units", ")", "# type: ignore", "self", ".", "last_updated", "=", "datetime", ".", "utcnow", "(", ")", "return", "True" ]
Updates raw, data, and translations by fetching and parsing the METAR report Returns True if a new report is available, else False
[ "Updates", "raw", "data", "and", "translations", "by", "fetching", "and", "parsing", "the", "METAR", "report" ]
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/__init__.py#L88-L103
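A usage sketch for `Metar.update`; constructing the report object with a station identifier is an assumption, and the ICAO code and raw METAR string are illustrative::

    from emiz.avwx import Metar

    metar = Metar("UGTB")
    if metar.update():            # fetch, parse, and translate the latest report
        print(metar.raw)
        print(metar.translations)

    # A raw report string can also be supplied instead of fetching:
    metar.update("UGTB 121530Z 13014KT CAVOK 21/07 Q1020 NOSIG")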
244,096
etcher-be/emiz
emiz/avwx/__init__.py
Metar.summary
def summary(self) -> str: """ Condensed report summary created from translations """ if not self.translations: self.update() return summary.metar(self.translations)
python
def summary(self) -> str: """ Condensed report summary created from translations """ if not self.translations: self.update() return summary.metar(self.translations)
[ "def", "summary", "(", "self", ")", "->", "str", ":", "if", "not", "self", ".", "translations", ":", "self", ".", "update", "(", ")", "return", "summary", ".", "metar", "(", "self", ".", "translations", ")" ]
Condensed report summary created from translations
[ "Condensed", "report", "summary", "created", "from", "translations" ]
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/__init__.py#L106-L112
244,097
etcher-be/emiz
emiz/avwx/__init__.py
Taf.summary
def summary(self): # type: ignore """ Condensed summary for each forecast created from translations """ if not self.translations: self.update() return [summary.taf(trans) for trans in self.translations.forecast]
python
def summary(self): # type: ignore """ Condensed summary for each forecast created from translations """ if not self.translations: self.update() return [summary.taf(trans) for trans in self.translations.forecast]
[ "def", "summary", "(", "self", ")", ":", "# type: ignore", "if", "not", "self", ".", "translations", ":", "self", ".", "update", "(", ")", "return", "[", "summary", ".", "taf", "(", "trans", ")", "for", "trans", "in", "self", ".", "translations", ".", "forecast", "]" ]
Condensed summary for each forecast created from translations
[ "Condensed", "summary", "for", "each", "forecast", "created", "from", "translations" ]
1c3e32711921d7e600e85558ffe5d337956372de
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/__init__.py#L148-L154
244,098
jmoiron/gaspar
gaspar/consumers.py
Consumer.start
def start(self): """Start the consumer. This starts a listen loop on a zmq.PULL socket, calling ``self.handle`` on each incoming request and pushing the response on a zmq.PUSH socket back to the producer.""" if not self.initialized: raise Exception("Consumer not initialized (no Producer).") producer = self.producer context = zmq.Context() self.pull = context.socket(zmq.PULL) self.push = context.socket(zmq.PUSH) self.pull.connect('tcp://%s:%s' % (producer.host, producer.push_port)) self.push.connect('tcp://%s:%s' % (producer.host, producer.pull_port)) # TODO: notify the producer that this consumer's ready for work? self.listen()
python
def start(self): """Start the consumer. This starts a listen loop on a zmq.PULL socket, calling ``self.handle`` on each incoming request and pushing the response on a zmq.PUSH socket back to the producer.""" if not self.initialized: raise Exception("Consumer not initialized (no Producer).") producer = self.producer context = zmq.Context() self.pull = context.socket(zmq.PULL) self.push = context.socket(zmq.PUSH) self.pull.connect('tcp://%s:%s' % (producer.host, producer.push_port)) self.push.connect('tcp://%s:%s' % (producer.host, producer.pull_port)) # TODO: notify the producer that this consumer's ready for work? self.listen()
[ "def", "start", "(", "self", ")", ":", "if", "not", "self", ".", "initialized", ":", "raise", "Exception", "(", "\"Consumer not initialized (no Producer).\"", ")", "producer", "=", "self", ".", "producer", "context", "=", "zmq", ".", "_Context", "(", ")", "self", ".", "pull", "=", "context", ".", "socket", "(", "zmq", ".", "PULL", ")", "self", ".", "push", "=", "context", ".", "socket", "(", "zmq", ".", "PUSH", ")", "self", ".", "pull", ".", "connect", "(", "'tcp://%s:%s'", "%", "(", "producer", ".", "host", ",", "producer", ".", "push_port", ")", ")", "self", ".", "push", ".", "connect", "(", "'tcp://%s:%s'", "%", "(", "producer", ".", "host", ",", "producer", ".", "pull_port", ")", ")", "# TODO: notify the producer that this consumer's ready for work?", "self", ".", "listen", "(", ")" ]
Start the consumer. This starts a listen loop on a zmq.PULL socket, calling ``self.handle`` on each incoming request and pushing the response on a zmq.PUSH socket back to the producer.
[ "Start", "the", "consumer", ".", "This", "starts", "a", "listen", "loop", "on", "a", "zmq", ".", "PULL", "socket", "calling", "self", ".", "handle", "on", "each", "incoming", "request", "and", "pushing", "the", "response", "on", "a", "zmq", ".", "PUSH", "socket", "back", "to", "the", "producer", "." ]
cc9d7403a4d86382b10a7e96c6d0a020cc5e1b12
https://github.com/jmoiron/gaspar/blob/cc9d7403a4d86382b10a7e96c6d0a020cc5e1b12/gaspar/consumers.py#L29-L42
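A minimal subclass sketch for the start() wiring above; it assumes the Consumer is imported from gaspar.consumers and has already been initialized by a Producer (host and ports set), and that subclasses only need to supply handle():

from gaspar.consumers import Consumer  # assumed import path based on this row

class EchoConsumer(Consumer):
    def handle(self, message):
        # echo the request payload straight back to the producer
        return message

# once a Producer has initialized the consumer (constructor arguments omitted here),
# start() connects the PULL/PUSH sockets to the producer and blocks in listen():
# echo_consumer.start()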
244,099
jmoiron/gaspar
gaspar/consumers.py
Consumer.listen
def listen(self): """Listen forever on the zmq.PULL socket.""" while True: message = self.pull.recv() logger.debug("received message of length %d" % len(message)) uuid, message = message[:32], message[32:] response = uuid + self.handle(message) self.push.send(response)
python
def listen(self): """Listen forever on the zmq.PULL socket.""" while True: message = self.pull.recv() logger.debug("received message of length %d" % len(message)) uuid, message = message[:32], message[32:] response = uuid + self.handle(message) self.push.send(response)
[ "def", "listen", "(", "self", ")", ":", "while", "True", ":", "message", "=", "self", ".", "pull", ".", "recv", "(", ")", "logger", ".", "debug", "(", "\"received message of length %d\"", "%", "len", "(", "message", ")", ")", "uuid", ",", "message", "=", "message", "[", ":", "32", "]", ",", "message", "[", "32", ":", "]", "response", "=", "uuid", "+", "self", ".", "handle", "(", "message", ")", "self", ".", "push", ".", "send", "(", "response", ")" ]
Listen forever on the zmq.PULL socket.
[ "Listen", "forever", "on", "the", "zmq", ".", "PULL", "socket", "." ]
cc9d7403a4d86382b10a7e96c6d0a020cc5e1b12
https://github.com/jmoiron/gaspar/blob/cc9d7403a4d86382b10a7e96c6d0a020cc5e1b12/gaspar/consumers.py#L44-L51
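A sketch of the producer-side framing implied by the 32-byte uuid slice in listen(); requests are prefixed with a 32-character hex request id so replies can be matched back to callers (the names below are illustrative, not gaspar's actual API):

import uuid

payload = 'ping'                 # illustrative request body
request_id = uuid.uuid4().hex    # 32 hex characters, matching message[:32] in listen()
framed = request_id + payload
# the producer sends `framed` on its PUSH socket; listen() strips the first 32
# characters, passes the rest to handle(), and prefixes the reply with the same id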