Columns (name, type, min–max length or distinct values):

commit             string, lengths 40–40
old_file           string, lengths 4–118
new_file           string, lengths 4–118
old_contents       string, lengths 0–2.94k
new_contents       string, lengths 1–4.43k
subject            string, lengths 15–444
message            string, lengths 16–3.45k
lang               string, 1 distinct value
license            string, 13 distinct values
repos              string, lengths 5–43.2k
prompt             string, lengths 17–4.58k
response           string, lengths 1–4.43k
prompt_tagged      string, lengths 58–4.62k
response_tagged    string, lengths 1–4.43k
text               string, lengths 132–7.29k
text_tagged        string, lengths 173–7.33k

commit: c67d9b1b45d64743698c52331e7fadc4ed5f8236
old_file: properties/prandtl_meyer_function.py
new_file: properties/prandtl_meyer_function.py
old_contents:
#!/usr/bin/env python

from math import atan, pi, sqrt

from properties.constants import GAMMA


def nu_in_rad(m):
    if m < 1:
        raise ValueError('Mach number should be greater than or equal to 1')
    a = (GAMMA+1) / (GAMMA-1)
    b = m**2 - 1
    c = a**-1 * b
    return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))


def nu_in_deg(m):
    return nu_in_rad(m) * 180 / pi

new_contents:
#!/usr/bin/env python

from __future__ import absolute_import, division

from math import asin, atan, degrees, sqrt

from properties.constants import GAMMA


def nu_in_rad(m):
    if m < 1:
        raise ValueError('Mach number should be greater than or equal to 1')
    a = (GAMMA+1) / (GAMMA-1)
    b = m**2 - 1
    c = a**-1 * b
    return sqrt(a) * atan(sqrt(c)) - atan(sqrt(b))


def nu_in_deg(m):
    return degrees(nu_in_rad(m))


def mu_in_rad(m):
    return asin(1/m)


def mu_in_deg(m):
    return degrees(mu_in_rad(m))

subject: Add mu_in_rad and mu_in_deg, use built in method to convert rad to deg
message: Add mu_in_rad and mu_in_deg, use built in method to convert rad to deg
lang: Python
license: mit
repos: iwarobots/TunnelDesign
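
A minimal usage sketch of the two helpers this commit adds; it assumes properties.constants defines GAMMA = 1.4 (the value for air), which is not shown in this record, and the printed values are the textbook results for Mach 2:

from properties.prandtl_meyer_function import mu_in_deg, nu_in_deg

print(nu_in_deg(2.0))  # ~26.38, the Prandtl-Meyer angle nu for Mach 2
print(mu_in_deg(2.0))  # 30.0, the Mach angle mu = degrees(asin(1/2))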

commit: 840efdbc3771f60881e4052feaea18a9ea7d8eda
old_file: SLA_bot/gameevent.py
new_file: SLA_bot/gameevent.py
old_contents:
class GameEvent:
    def __init__(self, name, start, end = None):
        self.name = name
        self.start = start
        self.end = end

    @classmethod
    def from_ical(cls, component):
        n = component.get('summary')
        s = component.get('dtstart').dt
        e = getattr(component.get('dtend'), 'dt', None)
        return cls(n, s, e)

    def duration(self, tz):
        s = self.start.astimezone(tz)
        s_dt = s.strftime('%b %d, %H:%M')
        tz_str= s.strftime('%Z')
        try:
            e_time = self.end.astimezone(tz).strftime('%H:%M')
        except AttributeError:
            return '**{}** @ {} {}'.format(self.name, s_dt, tz_str)
        return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str)

    def __repr__(self):
        return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end)

new_contents:
class GameEvent:
    def __init__(self, name, start, end = None):
        self.name = name
        self.start = start
        self.end = end

    @classmethod
    def from_ical(cls, component):
        n = component.get('summary')
        s = component.get('dtstart').dt
        e = getattr(component.get('dtend'), 'dt', None)
        return cls(n, s, e)

    def duration(self, tz):
        s = self.start.astimezone(tz)
        s_dt = s.strftime('%b %d, %H:%M')
        tz_str= s.strftime('%Z')
        try:
            e_time = self.end.astimezone(tz).strftime('%H:%M')
        except AttributeError:
            return '**{}** @ {} {}'.format(self.name, s_dt, tz_str)
        return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str)

    def __repr__(self):
        return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end)


class MultiShipEvent(GameEvent):
    def __init__(self, name, ships, start, end = None):
        super().__init__(name, start, end)
        self.ships = ships
        self.unscheduled = False
        for event in self.ships[1:]:
            if event:
                self.unscheduled = True

    def multi_dur(self, targets, tz):
        if self.unscheduled == False:
            return self.ships[0]
        ship_events = []
        for index in targets:
            line = '`ship {:02d}: `{}'.format(index, self.ships[index])
            ship_events.append(line)
        if len(ship_events) < 1:
            return ''
        header = self.duration(tz)
        body = '\n'.join(ship_events)
        return '{}\n{}'.format(header, body)

    def __repr__(self):
        return 'MultiShipEvent({}, {}, {}, {})'.format(self.name, self.ships, self.start, self.end)

subject: Add functions to get external alerts
message:
Add functions to get external alerts

For the purpose of getting unscheduled events
lang: Python
license: mit
repos: EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
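
A small sketch of how GameEvent.duration renders; the event name and times are illustrative, and Python 3's datetime.timezone stands in for any tzinfo:

from datetime import datetime, timezone

ev = GameEvent('Emergency Quest', datetime(2017, 5, 1, 13, 0, tzinfo=timezone.utc))
# With no end time, the AttributeError branch formats the start only:
print(ev.duration(timezone.utc))  # **Emergency Quest** @ May 01, 13:00 UTC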

commit: 3fec4855d53a5077762892295582601cc193d068
old_file: tests/scoring_engine/models/test_kb.py
new_file: tests/scoring_engine/models/test_kb.py
old_contents:
from scoring_engine.models.kb import KB
from tests.scoring_engine.unit_test import UnitTest


class TestKB(UnitTest):

    def test_init_property(self):
        kb = KB(name="task_ids", value="1,2,3,4,5,6")
        assert kb.id is None
        assert kb.name == 'task_ids'
        assert kb.value == '1,2,3,4,5,6'

    def test_basic_kb(self):
        kb = KB(name="task_ids", value="1,2,3,4,5,6")
        self.db.save(kb)
        assert kb.id is not None

new_contents:
from scoring_engine.models.kb import KB
from tests.scoring_engine.unit_test import UnitTest


class TestKB(UnitTest):

    def test_init_property(self):
        kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=100)
        assert kb.id is None
        assert kb.name == 'task_ids'
        assert kb.value == '1,2,3,4,5,6'
        assert kb.round_num == 100

    def test_basic_kb(self):
        kb = KB(name="task_ids", value="1,2,3,4,5,6", round_num=50)
        self.db.save(kb)
        assert kb.id is not None

subject: Update kb test to check for port_num
message:
Update kb test to check for port_num

Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com>
lang: Python
license: mit
repos: pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine

commit: b3975b4e5b855eb212cc5171e17ed93e315f1c30
old_file: cheap_repr/utils.py
new_file: cheap_repr/utils.py
old_contents:
import traceback

from qualname import qualname


def safe_qualname(cls):
    # type: (type) -> str
    result = _safe_qualname_cache.get(cls)
    if not result:
        try:
            result = qualname(cls)
        except (AttributeError, IOError):
            result = cls.__name__
        if '<locals>' not in result:
            _safe_qualname_cache[cls] = result
    return result


_safe_qualname_cache = {}


def type_name(x):
    return safe_qualname(x.__class__)


def exception_string(exc):
    assert isinstance(exc, BaseException)
    return ''.join(traceback.format_exception_only(type(exc), exc)).strip()

new_contents:
import traceback

from qualname import qualname


def safe_qualname(cls):
    # type: (type) -> str
    result = _safe_qualname_cache.get(cls)
    if not result:
        try:
            result = qualname(cls)
        except (AttributeError, IOError, SyntaxError):
            result = cls.__name__
        if '<locals>' not in result:
            _safe_qualname_cache[cls] = result
    return result


_safe_qualname_cache = {}


def type_name(x):
    return safe_qualname(x.__class__)


def exception_string(exc):
    assert isinstance(exc, BaseException)
    return ''.join(traceback.format_exception_only(type(exc), exc)).strip()

subject: Make safe_qualname more permissive (getting syntax errors on travis in 2.6)
message: Make safe_qualname more permissive (getting syntax errors on travis in 2.6)
lang: Python
license: mit
repos: alexmojaki/cheap_repr,alexmojaki/cheap_repr
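
A sketch of what safe_qualname computes, assuming the third-party qualname package is installed; Outer and Inner are hypothetical classes, not part of this commit:

from cheap_repr.utils import safe_qualname, type_name

class Outer(object):
    class Inner(object):
        pass

print(safe_qualname(Outer.Inner))  # 'Outer.Inner'; falls back to 'Inner' if source inspection fails
print(type_name(Outer.Inner()))    # same lookup, via the instance's class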

commit: 94a128b7202c5629b77208a401222f3dcba92196
old_file: anonymoustre/main.py
new_file: anonymoustre/main.py
old_contents:
from functools import reduce
import pprint
import time

import shodan
import requests

import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores

pp = pprint.PrettyPrinter(indent=2)


def main():
    start_time = time.time()
    # No more than 10 requests
    ips = ['103.245.153.70']
    scored_ips = assoc_default_score(ips)
    shodan_scores = query_shodan_api(ips)
    google_scores = query_google_api(ips)
    # Limited number of requests... Be careful
    mx_toolbox_scores = query_mxtoolbox_api(ips)
    results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores, mx_toolbox_scores])
    pp.pprint(results)
    print("--------- %s seconds -------" % (time.time() - start_time))
    return results


def get_some_ips():
    req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
    return [line for line in req.text.split('\n') if line and line[0].isdigit()]


def get_bad_ips():
    with open("bad_ips.txt", "r") as file:
        ips = list(filter(lambda line: line != '', file.read().split("\n")))
    return ips


if __name__ == "__main__":
    main()

new_contents:
from functools import reduce
import pprint
import time

import shodan
import requests

import api_key
from google_api import query_google_api
from shodan_api import query_shodan_api
from mxtoolbox_api import query_mxtoolbox_api
from utils import assoc_default_score, combine_scores

pp = pprint.PrettyPrinter(indent=2)


def main():
    start_time = time.time()
    # No more than 10 requests
    ips = ['103.245.153.70']
    scored_ips = assoc_default_score(ips)
    shodan_scores = query_shodan_api(ips)
    google_scores = query_google_api(ips)
    # Limited number of requests... Be careful
    # mx_toolbox_scores = query_mxtoolbox_api(ips)
    results = reduce(combine_scores, [scored_ips, shodan_scores, google_scores])
    pp.pprint(results)
    print("--------- %s seconds -------" % (time.time() - start_time))
    return results


def get_some_ips():
    req = requests.get("https://zeustracker.abuse.ch/blocklist.php?download=badips")
    return [line for line in req.text.split('\n') if line and line[0].isdigit()]


def get_bad_ips():
    with open("bad_ips.txt", "r") as file:
        ips = list(filter(lambda line: line != '', file.read().split("\n")))
    return ips


if __name__ == "__main__":
    main()

subject: Comment mxtoolbox because of limited requests
message: Comment mxtoolbox because of limited requests
lang: Python
license: mit
repos: Dominionized/anonymoustre,Dominionized/anonymoustre

commit: a605e6b294e941d9278601c3af0330f0b802534e
old_file: src/controller.py
new_file: src/controller.py
old_contents:
#!/usr/bin/env python
import rospy


def compute_control_actions(msg):
    pass


if __name__ == '__main__':
    rospy.init_node('controller')
    subscriber = rospy.Subscriber('odometry_10_hz', Odometry, compute_control_actions)
    rospy.spin()

new_contents:
#!/usr/bin/env python
import rospy
import tf
from nav_msgs.msg import Odometry

i = 0


def get_position(pose):
    return pose.pose.position


def get_orientation(pose):
    quaternion = (
        pose.pose.orientation.x,
        pose.pose.orientation.y,
        pose.pose.orientation.z,
        pose.pose.orientation.w
    )
    return tf.transformations.euler_from_quaternion(quaternion)


def compute_control_actions(msg):
    global i
    pose = msg.pose
    current_position = get_position(pose)
    current_orientation = get_orientation(pose)


if __name__ == '__main__':
    rospy.init_node('controller')
    subscriber = rospy.Subscriber('odometry_10_hz', Odometry, compute_control_actions)
    rospy.spin()

subject: Implement functions to obtain position and orientation given a pose
message: Implement functions to obtain position and orientation given a pose
lang: Python
license: mit
repos: bit0001/trajectory_tracking,bit0001/trajectory_tracking
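
For reference, tf.transformations.euler_from_quaternion takes an (x, y, z, w) quaternion and returns (roll, pitch, yaw) in radians; a quick sanity check with the identity quaternion (assumes a ROS environment providing the tf package):

import tf

# The identity quaternion encodes zero rotation about every axis.
roll, pitch, yaw = tf.transformations.euler_from_quaternion((0.0, 0.0, 0.0, 1.0))
print(roll, pitch, yaw)  # 0.0 0.0 0.0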

commit: ee9b6b1640745bb7b757f1ec8603b19d4f678fb8
old_file: core/observables/file.py
new_file: core/observables/file.py
old_contents:
from __future__ import unicode_literals

from mongoengine import *

from core.observables import Observable
from core.observables import Hash


class File(Observable):

    value = StringField(verbose_name="SHA256 hash")
    mime_type = StringField(verbose_name="MIME type")
    hashes = DictField(verbose_name="Hashes")
    body = ReferenceField("AttachedFile")
    filenames = ListField(StringField(), verbose_name="Filenames")

    DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]

    @staticmethod
    def check_type(txt):
        return True

    def info(self):
        i = Observable.info(self)
        i['mime_type'] = self.mime_type
        i['hashes'] = self.hashes
        return i

new_contents:
from __future__ import unicode_literals

from flask import url_for
from flask_mongoengine.wtf import model_form
from mongoengine import *

from core.observables import Observable
from core.database import StringListField


class File(Observable):

    value = StringField(verbose_name="Value")
    mime_type = StringField(verbose_name="MIME type")
    hashes = DictField(verbose_name="Hashes")
    body = ReferenceField("AttachedFile")
    filenames = ListField(StringField(), verbose_name="Filenames")

    DISPLAY_FIELDS = Observable.DISPLAY_FIELDS + [("mime_type", "MIME Type")]

    exclude_fields = Observable.exclude_fields + ['hashes', 'body']

    @classmethod
    def get_form(klass):
        form = model_form(klass, exclude=klass.exclude_fields)
        form.filenames = StringListField("Filenames")
        return form

    @staticmethod
    def check_type(txt):
        return True

    def info(self):
        i = Observable.info(self)
        i['mime_type'] = self.mime_type
        i['hashes'] = self.hashes
        return i

subject: Clean up File edit view
message: Clean up File edit view
lang: Python
license: apache-2.0
repos: yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti

commit: 59579d578a52fe592ef0ca1a275b15b4b37d3c51
old_file: logtacts/settings/heroku.py
new_file: logtacts/settings/heroku.py
old_contents:
from .base import *
import dj_database_url

DEBUG = False
TEMPLATE_DEBUG = DEBUG

DATABASES['default'] = dj_database_url.config()

SECRET_KEY = get_env_variable("SECRET_KEY")

ALLOWED_HOSTS = [
    'localhost',
    '127.0.0.1',
    '.herokuapp.com',
]

STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'

INSTALLED_APPS += (
    'gunicorn',
)

new_contents:
from .base import *
import dj_database_url

DEBUG = False
TEMPLATE_DEBUG = DEBUG

DATABASES['default'] = dj_database_url.config()

SECRET_KEY = get_env_variable("SECRET_KEY")

ALLOWED_HOSTS = [
    'localhost',
    '127.0.0.1',
    '.herokuapp.com',
    '.pebble.ink',
]

STATIC_URL = '//logtacts.s3.amazonaws.com/assets/'

INSTALLED_APPS += (
    'gunicorn',
)

subject: Add pebble.ink to allowed hosts (flynn deployment)
message: Add pebble.ink to allowed hosts (flynn deployment)
lang: Python
license: mit
repos: phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts
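
The leading dot in '.pebble.ink' is Django's subdomain wildcard; a quick check with Django's own host validator (hostnames are illustrative):

from django.http.request import validate_host

print(validate_host('www.pebble.ink', ['.pebble.ink']))  # True: any subdomain matches
print(validate_host('pebble.ink', ['.pebble.ink']))      # True: the bare domain matches too
print(validate_host('evil.example', ['.pebble.ink']))    # False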

commit: 0d6a8f3978188f3e343c364806e0bb6e6ac1e643
old_file: tests/qtcore/qmetaobject_test.py
new_file: tests/qtcore/qmetaobject_test.py
old_contents:
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''Tests for static methos conflicts with class methods'''

import unittest

from PySide.QtCore import *


class Foo(QFile):
    pass


class qmetaobject_test(unittest.TestCase):
    def test_QMetaObject(self):
        qobj = QObject()
        qobj_metaobj = qobj.metaObject()
        self.assertEqual(qobj_metaobj.className(), "QObject")

        obj = QFile()
        m = obj.metaObject()
        self.assertEqual(m.className(), "QFile")
        self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())

        obj = Foo()
        m = obj.metaObject()
        self.assertEqual(m.className(), "Foo")
        self.assertEqual(m.methodCount(), QFile().metaObject().methodCount())


if __name__ == '__main__':
    unittest.main()

new_contents:
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''Tests for static methos conflicts with class methods'''

import unittest

from PySide.QtCore import *


class Foo(QFile):
    pass


class qmetaobject_test(unittest.TestCase):
    def test_QMetaObject(self):
        qobj = QObject()
        qobj_metaobj = qobj.metaObject()
        self.assertEqual(qobj_metaobj.className(), "QObject")

        obj = QFile()
        m = obj.metaObject()
        self.assertEqual(m.className(), "QFile")
        self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())

        obj = Foo()
        m = obj.metaObject()
        self.assertEqual(m.className(), "Foo")
        f = QFile()
        fm = f.metaObject()
        self.assertEqual(m.methodCount(), fm.methodCount())


if __name__ == '__main__':
    unittest.main()

subject: Fix qmentaobject test to work with dynamic metaobject.
message: Fix qmentaobject test to work with dynamic metaobject.
lang: Python
license: lgpl-2.1
repos:
M4rtinK/pyside-android,pankajp/pyside,enthought/pyside,PySide/PySide,qtproject/pyside-pyside,PySide/PySide,gbaty/pyside2,enthought/pyside,RobinD42/pyside,enthought/pyside,PySide/PySide,BadSingleton/pyside2,M4rtinK/pyside-android,BadSingleton/pyside2,qtproject/pyside-pyside,BadSingleton/pyside2,RobinD42/pyside,pankajp/pyside,gbaty/pyside2,pankajp/pyside,RobinD42/pyside,M4rtinK/pyside-bb10,qtproject/pyside-pyside,enthought/pyside,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,M4rtinK/pyside-bb10,M4rtinK/pyside-android,BadSingleton/pyside2,RobinD42/pyside,qtproject/pyside-pyside,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,BadSingleton/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,RobinD42/pyside,PySide/PySide,IronManMark20/pyside2,gbaty/pyside2,pankajp/pyside,enthought/pyside,RobinD42/pyside,PySide/PySide,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-android,gbaty/pyside2,enthought/pyside,M4rtinK/pyside-android
#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") self.assertEqual(m.methodCount(), QFile().metaObject().methodCount()) if __name__ == '__main__': unittest.main() Fix qmentaobject test to work with dynamic metaobject.
#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") f = QFile() fm = f.metaObject() self.assertEqual(m.methodCount(), fm.methodCount()) if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") self.assertEqual(m.methodCount(), QFile().metaObject().methodCount()) if __name__ == '__main__': unittest.main() <commit_msg>Fix qmentaobject test to work with dynamic metaobject.<commit_after>
#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") f = QFile() fm = f.metaObject() self.assertEqual(m.methodCount(), fm.methodCount()) if __name__ == '__main__': unittest.main()
#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") self.assertEqual(m.methodCount(), QFile().metaObject().methodCount()) if __name__ == '__main__': unittest.main() Fix qmentaobject test to work with dynamic metaobject.#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") f = QFile() fm = f.metaObject() self.assertEqual(m.methodCount(), fm.methodCount()) if __name__ == '__main__': unittest.main()
<commit_before>#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") self.assertEqual(m.methodCount(), QFile().metaObject().methodCount()) if __name__ == '__main__': unittest.main() <commit_msg>Fix qmentaobject test to work with dynamic metaobject.<commit_after>#!/usr/bin/python # -*- coding: utf-8 -*- '''Tests for static methos conflicts with class methods''' import unittest from PySide.QtCore import * class Foo(QFile): pass class qmetaobject_test(unittest.TestCase): def test_QMetaObject(self): qobj = QObject() qobj_metaobj = qobj.metaObject() self.assertEqual(qobj_metaobj.className(), "QObject") obj = QFile() m = obj.metaObject() self.assertEqual(m.className(), "QFile") self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount()) obj = Foo() m = obj.metaObject() self.assertEqual(m.className(), "Foo") f = QFile() fm = f.metaObject() self.assertEqual(m.methodCount(), fm.methodCount()) if __name__ == '__main__': unittest.main()
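The original assertion read methodCount() off the metaobject of a throwaway QFile(); with PySide's dynamic metaobjects, the metaobject belongs to its instance, so the temporary could be collected while its metaobject was still in use. The fix simply keeps the owning QFile bound to a local name. A loose pure-Python analogue of the lifetime hazard, assuming CPython's immediate refcount collection (Widget is a made-up stand-in):

    import weakref

    class Widget(object):
        pass

    dangling = weakref.ref(Widget())   # the temporary dies as soon as the call returns
    print(dangling())                  # None: the referent is already gone

    w = Widget()
    held = weakref.ref(w)              # binding the owner to a name keeps the referent valid
    print(held() is w)                 # True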
80638b2070f578408f00d7a263ccfb27fea5b1d4
api/base/language.py
api/base/language.py
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL to confirm.')
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL with same ' 'query parameters to confirm.')
Update delete warning to include instructions that same query parameters need to be in request
Update delete warning to include instructions that same query parameters need to be in request
Python
apache-2.0
cwisecarver/osf.io,emetsger/osf.io,rdhyee/osf.io,caseyrollins/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,sloria/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,mluke93/osf.io,pattisdr/osf.io,acshi/osf.io,GageGaskins/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,Ghalko/osf.io,doublebits/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,mluke93/osf.io,brandonPurvis/osf.io,emetsger/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,baylee-d/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,acshi/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,kwierman/osf.io,RomanZWang/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,billyhunt/osf.io,chennan47/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,icereval/osf.io,danielneis/osf.io,chennan47/osf.io,monikagrabowska/osf.io,binoculars/osf.io,kch8qx/osf.io,billyhunt/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,danielneis/osf.io,zamattiac/osf.io,abought/osf.io,cslzchen/osf.io,chrisseto/osf.io,mfraezz/osf.io,jnayak1/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,wearpants/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,abought/osf.io,ticklemepierce/osf.io,doublebits/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,acshi/osf.io,KAsante95/osf.io,felliott/osf.io,emetsger/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,felliott/osf.io,RomanZWang/osf.io,mluo613/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,doublebits/osf.io,mattclark/osf.io,kch8qx/osf.io,amyshi188/osf.io,billyhunt/osf.io,alexschiller/osf.io,acshi/osf.io,KAsante95/osf.io,felliott/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,abought/osf.io,hmoco/osf.io,mfraezz/osf.io,caseyrollins/osf.io,icereval/osf.io,danielneis/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,caneruguz/osf.io,crcresearch/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,hmoco/osf.io,alexschiller/osf.io,chrisseto/osf.io,aaxelb/osf.io,felliott/osf.io,doublebits/osf.io,danielneis/osf.io,pattisdr/osf.io,adlius/osf.io,mluo613/osf.io,SSJohns/osf.io,emetsger/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,mluo613/osf.io,kwierman/osf.io,mluo613/osf.io,alexschiller/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,erinspace/osf.io,kwierman/osf.io,doublebits/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,rdhyee/osf.io,SSJohns/osf.io,adlius/osf.io
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL to confirm.') Update delete warning to include instructions that same query parameters need to be in request
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL with same ' 'query parameters to confirm.')
<commit_before>from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL to confirm.') <commit_msg>Update delete warning to include instructions that same query parameters need to be in request<commit_after>
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL with same ' 'query parameters to confirm.')
from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL to confirm.') Update delete warning to include instructions that same query parameters need to be in requestfrom django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL with same ' 'query parameters to confirm.')
<commit_before>from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL to confirm.') <commit_msg>Update delete warning to include instructions that same query parameters need to be in request<commit_after>from django.utils.translation import ugettext_lazy as _ BEFORE_BULK_DELETE = _('Are you sure you want to delete these projects? They will no longer be ' 'available to other contributors. Send delete request to new URL with same ' 'query parameters to confirm.')
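Because BEFORE_BULK_DELETE is wrapped in ugettext_lazy, translation is deferred until the string is rendered rather than fixed at import time, so the warning follows the active language of each request. A short sketch of the effect (assumes a translation catalog for the chosen language exists):

    from django.utils import translation
    from api.base.language import BEFORE_BULK_DELETE

    with translation.override('es'):
        print(str(BEFORE_BULK_DELETE))   # rendered through the Spanish catalog, if present
    print(str(BEFORE_BULK_DELETE))       # back to the default language outside the block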
b82cc421f0bd19caebc20900d774f40831746dab
numba/traits.py
numba/traits.py
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def __get__(self, instance, owner): obj = getattr(instance, self.value) return getattr(obj, self.delegate_attr_name or self.attr_name) def __set__(self, instance, value): obj = getattr(instance, self.value) return setattr(obj, self.delegate_attr_name or self.attr_name, value)
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def obj(self, instance): return getattr(instance, self.value) @property def attr(self): return self.delegate_attr_name or self.attr_name def __get__(self, instance, owner): return getattr(self.obj(instance), self.attr) def __set__(self, instance, value): return setattr(self.obj(instance), self.attr, value) def __delete__(self, instance): delattr(self.obj(instance), self.attr)
Support deletion of trait delegate
Support deletion of trait delegate
Python
bsd-2-clause
pitrou/numba,GaZ3ll3/numba,stefanseefeld/numba,GaZ3ll3/numba,seibert/numba,pitrou/numba,stonebig/numba,stefanseefeld/numba,gdementen/numba,GaZ3ll3/numba,stuartarchibald/numba,IntelLabs/numba,jriehl/numba,cpcloud/numba,gdementen/numba,jriehl/numba,pombredanne/numba,numba/numba,GaZ3ll3/numba,seibert/numba,ssarangi/numba,cpcloud/numba,numba/numba,stuartarchibald/numba,GaZ3ll3/numba,ssarangi/numba,pitrou/numba,seibert/numba,gdementen/numba,pombredanne/numba,gdementen/numba,cpcloud/numba,ssarangi/numba,pombredanne/numba,IntelLabs/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stonebig/numba,sklam/numba,stuartarchibald/numba,sklam/numba,gmarkall/numba,IntelLabs/numba,pitrou/numba,ssarangi/numba,sklam/numba,jriehl/numba,numba/numba,sklam/numba,pitrou/numba,stefanseefeld/numba,cpcloud/numba,IntelLabs/numba,stonebig/numba,jriehl/numba,stefanseefeld/numba,numba/numba,stonebig/numba,stefanseefeld/numba,seibert/numba,pombredanne/numba,gdementen/numba,IntelLabs/numba,stonebig/numba,gmarkall/numba,ssarangi/numba,seibert/numba,sklam/numba,stuartarchibald/numba,numba/numba,pombredanne/numba,gmarkall/numba,gmarkall/numba,jriehl/numba
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def __get__(self, instance, owner): obj = getattr(instance, self.value) return getattr(obj, self.delegate_attr_name or self.attr_name) def __set__(self, instance, value): obj = getattr(instance, self.value) return setattr(obj, self.delegate_attr_name or self.attr_name, value) Support deletion of trait delegate
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def obj(self, instance): return getattr(instance, self.value) @property def attr(self): return self.delegate_attr_name or self.attr_name def __get__(self, instance, owner): return getattr(self.obj(instance), self.attr) def __set__(self, instance, value): return setattr(self.obj(instance), self.attr, value) def __delete__(self, instance): delattr(self.obj(instance), self.attr)
<commit_before>""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def __get__(self, instance, owner): obj = getattr(instance, self.value) return getattr(obj, self.delegate_attr_name or self.attr_name) def __set__(self, instance, value): obj = getattr(instance, self.value) return setattr(obj, self.delegate_attr_name or self.attr_name, value) <commit_msg>Support deletion of trait delegate<commit_after>
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def obj(self, instance): return getattr(instance, self.value) @property def attr(self): return self.delegate_attr_name or self.attr_name def __get__(self, instance, owner): return getattr(self.obj(instance), self.attr) def __set__(self, instance, value): return setattr(self.obj(instance), self.attr, value) def __delete__(self, instance): delattr(self.obj(instance), self.attr)
""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def __get__(self, instance, owner): obj = getattr(instance, self.value) return getattr(obj, self.delegate_attr_name or self.attr_name) def __set__(self, instance, value): obj = getattr(instance, self.value) return setattr(obj, self.delegate_attr_name or self.attr_name, value) Support deletion of trait delegate""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def obj(self, instance): return getattr(instance, self.value) @property def attr(self): return self.delegate_attr_name or self.attr_name def __get__(self, instance, owner): return getattr(self.obj(instance), self.attr) def __set__(self, instance, value): return setattr(self.obj(instance), self.attr, value) def __delete__(self, instance): delattr(self.obj(instance), self.attr)
<commit_before>""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def __get__(self, instance, owner): obj = getattr(instance, self.value) return getattr(obj, self.delegate_attr_name or self.attr_name) def __set__(self, instance, value): obj = getattr(instance, self.value) return setattr(obj, self.delegate_attr_name or self.attr_name, value) <commit_msg>Support deletion of trait delegate<commit_after>""" Minimal traits implementation: @traits class MyClass(object): attr = Instance(SomeClass) my_delegation = Delegate('attr') """ import inspect # from numba.utils import TypedProperty def traits(cls): "@traits class decorator" for name, py_func in vars(cls).items(): if isinstance(py_func, TraitBase): py_func.set_attr_name(name) return cls class TraitBase(object): "Base class for traits" def __init__(self, value, doc=None): self.value = value self.doc = doc def set_attr_name(self, name): self.attr_name = name class Delegate(TraitBase): """ Delegate to some other object. """ def __init__(self, value, delegate_attr_name=None, doc=None): super(Delegate, self).__init__(value, doc=doc) self.delegate_attr_name = delegate_attr_name def obj(self, instance): return getattr(instance, self.value) @property def attr(self): return self.delegate_attr_name or self.attr_name def __get__(self, instance, owner): return getattr(self.obj(instance), self.attr) def __set__(self, instance, value): return setattr(self.obj(instance), self.attr, value) def __delete__(self, instance): delattr(self.obj(instance), self.attr)
0c6a5c55df5680bd8589f1040f2f16cf6aac86b3
openprescribing/frontend/migrations/0030_add_ccg_centroids.py
openprescribing/frontend/migrations/0030_add_ccg_centroids.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
Remove commented-out RunPython from migration
Remove commented-out RunPython from migration
Python
mit
ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ] Remove commented-out RunPython from migration
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ] <commit_msg>Remove commented-out RunPython from migration<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ] Remove commented-out RunPython from migration# -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), # This is now commented out because the SQL generated to execute # set_centroids_without_args includes a reference to fiels which aren't # created until migration 36. # migrations.RunPython(set_centroids_without_args), ] <commit_msg>Remove commented-out RunPython from migration<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2017-10-20 08:13 from __future__ import unicode_literals from django.db import migrations from frontend.management.commands.import_ccg_boundaries import set_centroids import django.contrib.gis.db.models.fields def set_centroids_without_args(*args): set_centroids() class Migration(migrations.Migration): dependencies = [ ('frontend', '0031_auto_20171004_1330'), ] operations = [ migrations.AddField( model_name='pct', name='centroid', field=django.contrib.gis.db.models.fields.PointField( blank=True, null=True, srid=4326), ), ]
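The deleted comment explained why the data step had to go: RunPython executes at migrate time, so every column that set_centroids reads or writes must already exist at that point in the migration graph, and here the needed fields only appear in a later migration. For reference, the removed step would have been wired up roughly like this (sketch, deliberately not part of the final migration):

    migrations.RunPython(set_centroids_without_args, migrations.RunPython.noop)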
9e7af5b54b35f37a89c0845da077d9efa9be55fc
flask_toolbox/web/configs.py
flask_toolbox/web/configs.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False class DevelopmentConfig(Config): DEBUG = True ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class DevelopmentConfig(Config): DEBUG = True class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
Move the database config to the basic Config
Move the database config to the basic Config
Python
mit
lord63/flask_toolbox,lord63/flask_toolbox
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False class DevelopmentConfig(Config): DEBUG = True ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True Move the database config to the basic Config
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class DevelopmentConfig(Config): DEBUG = True class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False class DevelopmentConfig(Config): DEBUG = True ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True <commit_msg>Move the database config to the basic Config<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class DevelopmentConfig(Config): DEBUG = True class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False class DevelopmentConfig(Config): DEBUG = True ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True Move the database config to the basic Config#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class DevelopmentConfig(Config): DEBUG = True class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False class DevelopmentConfig(Config): DEBUG = True ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True <commit_msg>Move the database config to the basic Config<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import import os class Config(object): SECRET_KEY = '123456790' # Create dummy secrey key so we can use sessions SQLALCHEMY_TRACK_MODIFICATIONS = False ROOT = os.path.dirname(os.path.realpath(__file__)) SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format( os.path.join(ROOT, 'flask_toolbox_dev.db')) class DevelopmentConfig(Config): DEBUG = True class ProductionConfig(Config): DEBUG = False class TestConfig(Config): DEBUG = True TESTING = True
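Because Python class attributes are inherited, hoisting ROOT and SQLALCHEMY_DATABASE_URI onto the base Config makes the database URI visible from every subclass, not just DevelopmentConfig. Flask's from_object reads uppercase attributes through normal attribute lookup, so a sketch like this now works for any of the configs (assumes Flask and this module are importable):

    from flask import Flask

    app = Flask(__name__)
    app.config.from_object(ProductionConfig)   # URI not defined on the subclass itself
    assert app.config['SQLALCHEMY_DATABASE_URI'].startswith('sqlite:///')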
c4e1382773d1a77c0f76faf15561070f2c1b053f
product_management_group/models/ir_model_access.py
product_management_group/models/ir_model_access.py
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products'): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products') or self.env.is_superuser(): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
Allow superuser to skip that restriction on products
[FIX] product_management_group: Allow superuser to skip that restriction on products closes ingadhoc/product#409 Signed-off-by: Nicolas Mac Rouillon <8d34fe7b7c65100e706828a8c0d03426900ffb59@adhoc.com.ar>
Python
agpl-3.0
ingadhoc/product,ingadhoc/product
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products'): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception) [FIX] product_management_group: Allow superuser to skip that restriction on products closes ingadhoc/product#409 Signed-off-by: Nicolas Mac Rouillon <8d34fe7b7c65100e706828a8c0d03426900ffb59@adhoc.com.ar>
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products') or self.env.is_superuser(): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
<commit_before>############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products'): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception) <commit_msg>[FIX] product_management_group: Allow superuser to skip that restriction on products closes ingadhoc/product#409 Signed-off-by: Nicolas Mac Rouillon <8d34fe7b7c65100e706828a8c0d03426900ffb59@adhoc.com.ar><commit_after>
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products') or self.env.is_superuser(): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products'): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception) [FIX] product_management_group: Allow superuser to skip that restriction on products closes ingadhoc/product#409 Signed-off-by: Nicolas Mac Rouillon <8d34fe7b7c65100e706828a8c0d03426900ffb59@adhoc.com.ar>############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products') or self.env.is_superuser(): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
<commit_before>############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products'): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception) <commit_msg>[FIX] product_management_group: Allow superuser to skip that restriction on products closes ingadhoc/product#409 Signed-off-by: Nicolas Mac Rouillon <8d34fe7b7c65100e706828a8c0d03426900ffb59@adhoc.com.ar><commit_after>############################################################################## # For copyright and license notices, see __openerp__.py file in module root # directory ############################################################################## from odoo import api, models, tools, exceptions, _ class IrModelAccess(models.Model): _inherit = 'ir.model.access' @api.model @tools.ormcache_context( 'self._uid', 'model', 'mode', 'raise_exception', keys=('lang',)) def check( self, model, mode='read', raise_exception=True): if isinstance(model, models.BaseModel): assert model._name == 'ir.model', 'Invalid model object' model_name = model.model else: model_name = model # we need to use this flag to know when the operation is from this modules if self._context.get('sale_quotation_products') or self._context.get('purchase_quotation_products') or self.env.is_superuser(): return True if mode != 'read' and model_name in [ 'product.template', 'product.product']: if self.env['res.users'].has_group( 'product_management_group.group_products_management'): return True elif raise_exception: raise exceptions.AccessError(_( "Sorry, you are not allowed to manage products." "Only users with 'Products Management' level are currently" " allowed to do that")) else: return False return super(IrModelAccess, self).check( model, mode=mode, raise_exception=raise_exception)
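env.is_superuser() is true when the environment carries the superuser flag, which is what sudo() sets in recent Odoo versions, so system and automated code can now pass the product-management gate without belonging to the explicit group. A hedged sketch of the calling side (model name and values are just examples):

    # Inside Odoo code where an `env` is available: sudo() switches on the
    # superuser flag, so the check() above short-circuits to True.
    product = env['product.product'].sudo().create({'name': 'Widget'})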
0f8729f378c385d15b079f8250b5910714418cf8
alembic/versions/2945717e3720_hide_user_numbers.py
alembic/versions/2945717e3720_hide_user_numbers.py
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
Fix brackets in the user number migration.
Fix brackets in the user number migration.
Python
agpl-3.0
MSPARP/newparp,MSPARP/newparp,MSPARP/newparp
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ### Fix brackets in the user number migration.
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
<commit_before>"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ### <commit_msg>Fix brackets in the user number migration.<commit_after>
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ### Fix brackets in the user number migration."""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
<commit_before>"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean()), nullable=False, server_default=True) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ### <commit_msg>Fix brackets in the user number migration.<commit_after>"""hide user numbers Revision ID: 2945717e3720 Revises: f8acbd22162 Create Date: 2016-01-31 00:43:02.777003 """ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table, column # revision identifiers, used by Alembic. revision = '2945717e3720' down_revision = 'f8acbd22162' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('chat_users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) op.add_column('users', sa.Column('show_user_numbers', sa.Boolean(), nullable=False, server_default=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'show_user_numbers') op.drop_column('chat_users', 'show_user_numbers') ### end Alembic commands ###
d3acc535faed84a65ca11a0be27302a8e5a9b798
minique/utils/redis_list.py
minique/utils/redis_list.py
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = redis_conn.llen(key) - last_n else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = max(0, redis_conn.llen(key) - last_n) else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
Fix negative offsets in read_list
Fix negative offsets in read_list
Python
mit
valohai/minique
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = redis_conn.llen(key) - last_n else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size Fix negative offsets in read_list
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = max(0, redis_conn.llen(key) - last_n) else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
<commit_before>from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = redis_conn.llen(key) - last_n else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size <commit_msg>Fix negative offsets in read_list<commit_after>
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = max(0, redis_conn.llen(key) - last_n) else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = redis_conn.llen(key) - last_n else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size Fix negative offsets in read_listfrom typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = max(0, redis_conn.llen(key) - last_n) else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
<commit_before>from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = redis_conn.llen(key) - last_n else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size <commit_msg>Fix negative offsets in read_list<commit_after>from typing import Iterable, Optional from redis import Redis def read_list( redis_conn: Redis, key: str, *, chunk_size: int = 4096, last_n: Optional[int] = None ) -> Iterable[bytes]: """ Read a possibly large Redis list in chunks. Avoids OOMs on the Redis side. :param redis_conn: Redis connection :param key: Key :param chunk_size: How many lines to read per request. :param last_n: Attempt to only read the last N lines. :return: """ if not redis_conn.exists(key): return if chunk_size <= 0: chunk_size = 4096 if last_n and last_n > 0: offset = max(0, redis_conn.llen(key) - last_n) else: offset = 0 while offset < redis_conn.llen(key): # Regarding that - 1 there, see this from https://redis.io/commands/lrange: # > Note that if you have a list of numbers from 0 to 100, LRANGE list 0 10 # > will return 11 elements, that is, the rightmost item is included. chunk = redis_conn.lrange(key, offset, offset + chunk_size - 1) or [] if not chunk: break yield from chunk offset += chunk_size
e9819a5202e3f6520095f25260a33637c591263f
website/addons/github/views/repos.py
website/addons/github/views/repos.py
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name, auto_init=True) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
Set auto_init=True when creating new github repo through osf
Set auto_init=True when creating new github repo through osf
Python
apache-2.0
ticklemepierce/osf.io,danielneis/osf.io,brandonPurvis/osf.io,acshi/osf.io,caneruguz/osf.io,aaxelb/osf.io,chrisseto/osf.io,aaxelb/osf.io,Ghalko/osf.io,pattisdr/osf.io,reinaH/osf.io,mattclark/osf.io,lyndsysimon/osf.io,SSJohns/osf.io,erinspace/osf.io,jmcarp/osf.io,aaxelb/osf.io,wearpants/osf.io,petermalcolm/osf.io,fabianvf/osf.io,RomanZWang/osf.io,samanehsan/osf.io,jnayak1/osf.io,jolene-esposito/osf.io,doublebits/osf.io,reinaH/osf.io,njantrania/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,sloria/osf.io,cslzchen/osf.io,mluke93/osf.io,lyndsysimon/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,caseyrygt/osf.io,sloria/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,SSJohns/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,ticklemepierce/osf.io,mluo613/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,bdyetton/prettychart,MerlinZhang/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,dplorimer/osf,cldershem/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,doublebits/osf.io,haoyuchen1992/osf.io,abought/osf.io,jeffreyliu3230/osf.io,arpitar/osf.io,brianjgeiger/osf.io,barbour-em/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,danielneis/osf.io,zachjanicki/osf.io,dplorimer/osf,laurenrevere/osf.io,mfraezz/osf.io,cldershem/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,cldershem/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,abought/osf.io,MerlinZhang/osf.io,kch8qx/osf.io,acshi/osf.io,kch8qx/osf.io,sloria/osf.io,erinspace/osf.io,kwierman/osf.io,icereval/osf.io,emetsger/osf.io,arpitar/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,GageGaskins/osf.io,zamattiac/osf.io,Nesiehr/osf.io,haoyuchen1992/osf.io,sbt9uc/osf.io,SSJohns/osf.io,hmoco/osf.io,amyshi188/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,amyshi188/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,cslzchen/osf.io,mfraezz/osf.io,samchrisinger/osf.io,fabianvf/osf.io,mluo613/osf.io,crcresearch/osf.io,kwierman/osf.io,emetsger/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,billyhunt/osf.io,chrisseto/osf.io,mluo613/osf.io,saradbowman/osf.io,jolene-esposito/osf.io,dplorimer/osf,caseyrygt/osf.io,jeffreyliu3230/osf.io,kch8qx/osf.io,binoculars/osf.io,Ghalko/osf.io,cosenal/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,chennan47/osf.io,bdyetton/prettychart,laurenrevere/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,samanehsan/osf.io,fabianvf/osf.io,GageGaskins/osf.io,Ghalko/osf.io,saradbowman/osf.io,abought/osf.io,Johnetordoff/osf.io,ckc6cz/osf.io,brianjgeiger/osf.io,cldershem/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,baylee-d/osf.io,felliott/osf.io,chennan47/osf.io,jinluyuan/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,cosenal/osf.io,mattclark/osf.io,TomHeatwole/osf.io,jolene-esposito/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,billyhunt/osf.io,binoculars/osf.io,DanielSBrown/osf.io,adlius/osf.io,brianjgeiger/osf.io,adlius/osf.io,chennan47/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,danielneis/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,GageGaskins/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,cosenal/osf.io,felliott/osf.io,acshi/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,jeffreyliu3230/osf.io,rdhyee/osf.io,billyhunt/osf.io,hmoco/osf.io,kch8qx/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,SSJohns/osf.io,RomanZWang/osf.io,adlius/osf.io,emetsger/osf.io
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, } Set auto_init=True when creating new github repo through osf
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name, auto_init=True) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
<commit_before># -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, } <commit_msg>Set auto_init=True when creating new github repo through osf<commit_after>
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name, auto_init=True) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, } Set auto_init=True when creating new github repo through osf# -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name, auto_init=True) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
<commit_before># -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, } <commit_msg>Set auto_init=True when creating new github repo through osf<commit_after># -*- coding: utf-8 -*- import httplib as http from flask import request from github3 import GitHubError from framework.exceptions import HTTPError from framework.auth.decorators import must_be_logged_in from website.project.decorators import must_have_addon from ..api import GitHub @must_be_logged_in @must_have_addon('github', 'user') def github_create_repo(**kwargs): repo_name = request.json.get('name') if not repo_name: raise HTTPError(http.BAD_REQUEST) user_settings = kwargs['user_addon'] connection = GitHub.from_settings(user_settings) try: repo = connection.create_repo(repo_name, auto_init=True) except GitHubError: # TODO: Check status code raise HTTPError(http.BAD_REQUEST) return { 'user': repo.owner.login, 'repo': repo.name, }
98c207ea262e500ea4f5c338a9bb5642047b24b7
alexandria/session.py
alexandria/session.py
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000 ) config.set_session_factory(_session_factory)
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000, timeout=864000, reissue_time=1200, ) config.set_session_factory(_session_factory)
Change the timeout, and reissue_time
Change the timeout, and reissue_time We want to make sure that the session never expires, since this also causes our CSRF token to expire, which causes issues...
Python
isc
cdunklau/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,bertjwregeer/alexandria,cdunklau/alexandria
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000 ) config.set_session_factory(_session_factory) Change the timeout, and reissue_time We want to make sure that the session never expires, since this also causes our CSRF token to expire, which causes issues...
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000, timeout=864000, reissue_time=1200, ) config.set_session_factory(_session_factory)
<commit_before>from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000 ) config.set_session_factory(_session_factory) <commit_msg>Change the timeout, and reissue_time We want to make sure that the session never expires, since this also causes our CSRF token to expire, which causes issues...<commit_after>
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000, timeout=864000, reissue_time=1200, ) config.set_session_factory(_session_factory)
from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000 ) config.set_session_factory(_session_factory) Change the timeout, and reissue_time We want to make sure that the session never expires, since this also causes our CSRF token to expire, which causes issues...from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000, timeout=864000, reissue_time=1200, ) config.set_session_factory(_session_factory)
<commit_before>from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000 ) config.set_session_factory(_session_factory) <commit_msg>Change the timeout, and reissue_time We want to make sure that the session never expires, since this also causes our CSRF token to expire, which causes issues...<commit_after>from pyramid.session import SignedCookieSessionFactory def includeme(config): # Create the session factory, we are using the stock one _session_factory = SignedCookieSessionFactory( config.registry.settings['pyramid.secret.session'], httponly=True, max_age=864000, timeout=864000, reissue_time=1200, ) config.set_session_factory(_session_factory)
12709ba84b4028f189d952208fff02cfb370d5c7
run.py
run.py
from cherrypy import wsgiserver from app import app import jinja2.ext if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) if __name__ == '__main__': try: server.start() except KeyboardInterrupt: server.stop()
from cherrypy import wsgiserver from app import app import jinja2.ext if __name__ == '__main__': if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) try: server.start() except KeyboardInterrupt: server.stop()
Fix control flow for main app entry
Fix control flow for main app entry
Python
mit
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
from cherrypy import wsgiserver from app import app import jinja2.ext if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) if __name__ == '__main__': try: server.start() except KeyboardInterrupt: server.stop() Fix control flow for main app entry
from cherrypy import wsgiserver from app import app import jinja2.ext if __name__ == '__main__': if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) try: server.start() except KeyboardInterrupt: server.stop()
<commit_before>from cherrypy import wsgiserver from app import app import jinja2.ext if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) if __name__ == '__main__': try: server.start() except KeyboardInterrupt: server.stop() <commit_msg>Fix control flow for main app entry<commit_after>
from cherrypy import wsgiserver from app import app import jinja2.ext if __name__ == '__main__': if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) try: server.start() except KeyboardInterrupt: server.stop()
from cherrypy import wsgiserver from app import app import jinja2.ext if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) if __name__ == '__main__': try: server.start() except KeyboardInterrupt: server.stop() Fix control flow for main app entryfrom cherrypy import wsgiserver from app import app import jinja2.ext if __name__ == '__main__': if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) try: server.start() except KeyboardInterrupt: server.stop()
<commit_before>from cherrypy import wsgiserver from app import app import jinja2.ext if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) if __name__ == '__main__': try: server.start() except KeyboardInterrupt: server.stop() <commit_msg>Fix control flow for main app entry<commit_after>from cherrypy import wsgiserver from app import app import jinja2.ext if __name__ == '__main__': if app.debug: app.run('0.0.0.0', port=8080, debug=app.debug) else: # from http://flask.pocoo.org/snippets/24/ d = wsgiserver.WSGIPathInfoDispatcher({'/': app}) server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 8080), d) try: server.start() except KeyboardInterrupt: server.stop()
ce762aa7765238d9b792dd08d0a55345e27a31a1
invoice/invoice_print.py
invoice/invoice_print.py
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.UserDetails, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.Profile, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
Use updated user profile model for pdf output
Use updated user profile model for pdf output
Python
mit
pickleshb/PyInvoice,pickleshb/PyInvoice,pickleshb/PyInvoice,pickleshb/PyInvoice,pickleshb/PyInvoice
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.UserDetails, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response Use updated user profile model for pdf output
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.Profile, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
<commit_before>from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.UserDetails, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response <commit_msg>Use updated user profile model for pdf output<commit_after>
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.Profile, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.UserDetails, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response Use updated user profile model for pdf outputfrom django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.Profile, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
<commit_before>from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.UserDetails, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response <commit_msg>Use updated user profile model for pdf output<commit_after>from django.views import generic from django.shortcuts import get_object_or_404 from django.template.loader import get_template from django.http import HttpResponse from z3c.rml import rml2pdf from invoice import models class InvoicePrint(generic.View): def get(self, request, pk): invoice = get_object_or_404(models.Invoice, pk=pk) user = get_object_or_404(models.Profile, pk=1) template = get_template('invoice_print.xml') context = { 'invoice': invoice, 'user': user, 'fonts': { 'roboto': { 'regular': 'static/fonts/roboto/Roboto-Thin.ttf', 'bold': 'static/fonts/roboto/Roboto-Light.ttf', } }, } rml = template.render(context) buf = rml2pdf.parseString(rml) response = HttpResponse(content_type='application/pdf') response['Content-Disposition'] = "filename=N%04d | %s.pdf" % (invoice.pk, invoice.company) response.write(buf.read()) return response
f7bd83ddabcad10beeeca9b1fc1e07631e68d4e0
src/models/c2w.py
src/models/c2w.py
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C')
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
Use tanh activation for word C2W embeddings
Use tanh activation for word C2W embeddings
Python
mit
milankinen/c2w2c,milankinen/c2w2c
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C') Use tanh activation for word C2W embeddings
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
<commit_before>from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C') <commit_msg>Use tanh activation for word C2W embeddings <commit_after>
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C') Use tanh activation for word C2W embeddings from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
<commit_before>from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') return Model(input=one_hots, output=s_E, name='W2C') <commit_msg>Use tanh activation for word C2W embeddings <commit_after>from keras.layers import LSTM, Input, Dense, TimeDistributed, Masking, Activation, merge from keras.models import Model from layers import Projection def C2W(params, V_C): one_hots = Input(shape=(params.maxlen, V_C.size), dtype='int8') c_E = TimeDistributed(Projection(params.d_C))(one_hots) # we want to preserve the state in case of padding so that the state # sequence s_Ef and s_Eb last values remain correct c_E_mask = Masking(mask_value=0.)(c_E) forward = LSTM(params.d_Wi, go_backwards=False, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) backwards = LSTM(params.d_Wi, go_backwards=True, dropout_U=0.1, dropout_W=0.1, consume_less='gpu')(c_E_mask) s_Ef = Dense(params.d_W)(forward) s_Eb = Dense(params.d_W)(backwards) s_E = merge(inputs=[s_Ef, s_Eb], mode='sum') #s_Eout = Activation('tanh')(s_E) return Model(input=one_hots, output=s_E, name='W2C')
0ef630bfee5a57ba2e5d487707249cdaa43ec63f
pseudorandom.py
pseudorandom.py
#!/usr/bin/env python
import os
from flask import Flask, render_template, request
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if request.headers.get('User-Agent', '')[:4].lower() == 'curl':
        return u"{0}\n".format(get_full_name())
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
#!/usr/bin/env python
import os
from flask import Flask, render_template, request, make_response
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if (request.headers.get('User-Agent', '')[:4].lower() == 'curl' or
            request.headers['Content-Type'] == 'text/plain'):
        return make_response((u"{0}\n".format(get_full_name()), 200,
                              {'Content-Type': 'text/plain'}))
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
Send text response when Content-Type is text/plain
Send text response when Content-Type is text/plain
Python
mit
treyhunner/pseudorandom.name,treyhunner/pseudorandom.name
#!/usr/bin/env python
import os
from flask import Flask, render_template, request
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if request.headers.get('User-Agent', '')[:4].lower() == 'curl':
        return u"{0}\n".format(get_full_name())
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
Send text response when Content-Type is text/plain
#!/usr/bin/env python
import os
from flask import Flask, render_template, request, make_response
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if (request.headers.get('User-Agent', '')[:4].lower() == 'curl' or
            request.headers['Content-Type'] == 'text/plain'):
        return make_response((u"{0}\n".format(get_full_name()), 200,
                              {'Content-Type': 'text/plain'}))
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
<commit_before>#!/usr/bin/env python
import os
from flask import Flask, render_template, request
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if request.headers.get('User-Agent', '')[:4].lower() == 'curl':
        return u"{0}\n".format(get_full_name())
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
<commit_msg>Send text response when Content-Type is text/plain<commit_after>
#!/usr/bin/env python
import os
from flask import Flask, render_template, request, make_response
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if (request.headers.get('User-Agent', '')[:4].lower() == 'curl' or
            request.headers['Content-Type'] == 'text/plain'):
        return make_response((u"{0}\n".format(get_full_name()), 200,
                              {'Content-Type': 'text/plain'}))
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
#!/usr/bin/env python
import os
from flask import Flask, render_template, request
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if request.headers.get('User-Agent', '')[:4].lower() == 'curl':
        return u"{0}\n".format(get_full_name())
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
Send text response when Content-Type is text/plain#!/usr/bin/env python
import os
from flask import Flask, render_template, request, make_response
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if (request.headers.get('User-Agent', '')[:4].lower() == 'curl' or
            request.headers['Content-Type'] == 'text/plain'):
        return make_response((u"{0}\n".format(get_full_name()), 200,
                              {'Content-Type': 'text/plain'}))
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
<commit_before>#!/usr/bin/env python
import os
from flask import Flask, render_template, request
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if request.headers.get('User-Agent', '')[:4].lower() == 'curl':
        return u"{0}\n".format(get_full_name())
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
<commit_msg>Send text response when Content-Type is text/plain<commit_after>#!/usr/bin/env python
import os
from flask import Flask, render_template, request, make_response
from names import get_full_name

app = Flask(__name__)


@app.route("/")
def index():
    if (request.headers.get('User-Agent', '')[:4].lower() == 'curl' or
            request.headers['Content-Type'] == 'text/plain'):
        return make_response((u"{0}\n".format(get_full_name()), 200,
                              {'Content-Type': 'text/plain'}))
    else:
        return render_template('index.html', name=get_full_name())

if __name__ == "__main__":
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
8bea1001922da415be24363e6fca677171c69f70
guild/commands/shell_impl.py
guild/commands/shell_impl.py
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import os
import subprocess
import sys

import guild.util


def main(_args):
    env = guild.util.safe_osenv()
    env["PYTHONPATH"] = os.path.pathsep.join(sys.path)
    p = subprocess.Popen([sys.executable, "-i"], env=env)
    p.communicate()
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import code


def main(_args):
    code.interact(local=locals())
Drop into repl without starting new Python process for shell cmd
Drop into repl without starting new Python process for shell cmd
Python
apache-2.0
guildai/guild,guildai/guild,guildai/guild,guildai/guild
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import os
import subprocess
import sys

import guild.util


def main(_args):
    env = guild.util.safe_osenv()
    env["PYTHONPATH"] = os.path.pathsep.join(sys.path)
    p = subprocess.Popen([sys.executable, "-i"], env=env)
    p.communicate()
Drop into repl without starting new Python process for shell cmd
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import code


def main(_args):
    code.interact(local=locals())
<commit_before># Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import os
import subprocess
import sys

import guild.util


def main(_args):
    env = guild.util.safe_osenv()
    env["PYTHONPATH"] = os.path.pathsep.join(sys.path)
    p = subprocess.Popen([sys.executable, "-i"], env=env)
    p.communicate()
<commit_msg>Drop into repl without starting new Python process for shell cmd<commit_after>
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import code


def main(_args):
    code.interact(local=locals())
# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import os
import subprocess
import sys

import guild.util


def main(_args):
    env = guild.util.safe_osenv()
    env["PYTHONPATH"] = os.path.pathsep.join(sys.path)
    p = subprocess.Popen([sys.executable, "-i"], env=env)
    p.communicate()
Drop into repl without starting new Python process for shell cmd# Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import code


def main(_args):
    code.interact(local=locals())
<commit_before># Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import os
import subprocess
import sys

import guild.util


def main(_args):
    env = guild.util.safe_osenv()
    env["PYTHONPATH"] = os.path.pathsep.join(sys.path)
    p = subprocess.Popen([sys.executable, "-i"], env=env)
    p.communicate()
<commit_msg>Drop into repl without starting new Python process for shell cmd<commit_after># Copyright 2017-2018 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import
from __future__ import division

import code


def main(_args):
    code.interact(local=locals())
039c552b3674531a746c14d1c34bd2f13fd078e5
Cura/util/removableStorage.py
Cura/util/removableStorage.py
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                drives.append(letter + ':/')
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append(volume)
    else:
        for volume in glob.glob('/media/*'):
            drives.append(volume)
    return drives
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        import ctypes
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                volumeName = ''
                nameBuffer = ctypes.create_unicode_buffer(1024)
                if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
                    volumeName = nameBuffer.value
                if volumeName == '':
                    volumeName = 'NO NAME'
                drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    else:
        for volume in glob.glob('/media/*'):
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    return drives
Enhance the SD card list with more info.
Enhance the SD card list with more info.
Python
agpl-3.0
alephobjects/Cura,alephobjects/Cura,alephobjects/Cura
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                drives.append(letter + ':/')
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append(volume)
    else:
        for volume in glob.glob('/media/*'):
            drives.append(volume)
    return drives
Enhance the SD card list with more info.
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        import ctypes
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                volumeName = ''
                nameBuffer = ctypes.create_unicode_buffer(1024)
                if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
                    volumeName = nameBuffer.value
                if volumeName == '':
                    volumeName = 'NO NAME'
                drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    else:
        for volume in glob.glob('/media/*'):
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    return drives
<commit_before>import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                drives.append(letter + ':/')
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append(volume)
    else:
        for volume in glob.glob('/media/*'):
            drives.append(volume)
    return drives
<commit_msg>Enhance the SD card list with more info.<commit_after>
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        import ctypes
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                volumeName = ''
                nameBuffer = ctypes.create_unicode_buffer(1024)
                if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
                    volumeName = nameBuffer.value
                if volumeName == '':
                    volumeName = 'NO NAME'
                drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    else:
        for volume in glob.glob('/media/*'):
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    return drives
import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                drives.append(letter + ':/')
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append(volume)
    else:
        for volume in glob.glob('/media/*'):
            drives.append(volume)
    return drives
Enhance the SD card list with more info.import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        import ctypes
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                volumeName = ''
                nameBuffer = ctypes.create_unicode_buffer(1024)
                if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
                    volumeName = nameBuffer.value
                if volumeName == '':
                    volumeName = 'NO NAME'
                drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    else:
        for volume in glob.glob('/media/*'):
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    return drives
<commit_before>import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                drives.append(letter + ':/')
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append(volume)
    else:
        for volume in glob.glob('/media/*'):
            drives.append(volume)
    return drives
<commit_msg>Enhance the SD card list with more info.<commit_after>import platform
import string
import glob
import os
import stat

def getPossibleSDcardDrives():
    drives = []
    if platform.system() == "Windows":
        from ctypes import windll
        import ctypes
        bitmask = windll.kernel32.GetLogicalDrives()
        for letter in string.uppercase:
            if bitmask & 1 and windll.kernel32.GetDriveTypeA(letter + ':/') == 2:
                volumeName = ''
                nameBuffer = ctypes.create_unicode_buffer(1024)
                if windll.kernel32.GetVolumeInformationW(ctypes.c_wchar_p(letter + ':/'), nameBuffer, ctypes.sizeof(nameBuffer), None, None, None, None, 0) == 0:
                    volumeName = nameBuffer.value
                if volumeName == '':
                    volumeName = 'NO NAME'
                drives.append(('%s (%s:)' % (volumeName, letter), letter + ':/', volumeName))
            bitmask >>= 1
    elif platform.system() == "Darwin":
        for volume in glob.glob('/Volumes/*'):
            if stat.S_ISLNK(os.lstat(volume).st_mode):
                continue
            #'Ejectable: Yes' in os.system('diskutil info \'%s\'' % (volume))
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    else:
        for volume in glob.glob('/media/*'):
            drives.append((os.path.basename(volume), os.path.basename(volume), volume))
    return drives
80d710269ff1d6421f4a29b9a0d424868cb5ec54
flaskiwsapp/auth/jwt.py
flaskiwsapp/auth/jwt.py
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify


def authenticate(email, password):
    user = get_user_by_email(email)
    if user and user.check_password(password) and user.is_active:
        return user


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify
from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException
from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR


def authenticate(email, password):
    try:
        user = get_user_by_email(email)
        if user and user.check_password(password) and user.is_active:
            return user
    except UserDoesnotExistsException as e:
        raise JWTError(error=str(type(e)), description=e.message)


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
    elif isinstance(e, Exception):
        error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR})
        error.update({'title': str(type(e))})
        error.update({'detail': 'Auth Failed: {}'.format(e.args[0])})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
Raise JWTError into authentication handler
Raise JWTError into authentication handler
Python
mit
rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel,rafasis1986/EngineeringMidLevel
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify


def authenticate(email, password):
    user = get_user_by_email(email)
    if user and user.check_password(password) and user.is_active:
        return user


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
Raise JWTError into authentication handler
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify
from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException
from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR


def authenticate(email, password):
    try:
        user = get_user_by_email(email)
        if user and user.check_password(password) and user.is_active:
            return user
    except UserDoesnotExistsException as e:
        raise JWTError(error=str(type(e)), description=e.message)


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
    elif isinstance(e, Exception):
        error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR})
        error.update({'title': str(type(e))})
        error.update({'detail': 'Auth Failed: {}'.format(e.args[0])})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
<commit_before>from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify


def authenticate(email, password):
    user = get_user_by_email(email)
    if user and user.check_password(password) and user.is_active:
        return user


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
<commit_msg>Raise JWTError into authentication handler<commit_after>
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify
from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException
from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR


def authenticate(email, password):
    try:
        user = get_user_by_email(email)
        if user and user.check_password(password) and user.is_active:
            return user
    except UserDoesnotExistsException as e:
        raise JWTError(error=str(type(e)), description=e.message)


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
    elif isinstance(e, Exception):
        error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR})
        error.update({'title': str(type(e))})
        error.update({'detail': 'Auth Failed: {}'.format(e.args[0])})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify


def authenticate(email, password):
    user = get_user_by_email(email)
    if user and user.check_password(password) and user.is_active:
        return user


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
Raise JWTError into authentication handlerfrom flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify
from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException
from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR


def authenticate(email, password):
    try:
        user = get_user_by_email(email)
        if user and user.check_password(password) and user.is_active:
            return user
    except UserDoesnotExistsException as e:
        raise JWTError(error=str(type(e)), description=e.message)


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
    elif isinstance(e, Exception):
        error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR})
        error.update({'title': str(type(e))})
        error.update({'detail': 'Auth Failed: {}'.format(e.args[0])})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
<commit_before>from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify


def authenticate(email, password):
    user = get_user_by_email(email)
    if user and user.check_password(password) and user.is_active:
        return user


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
<commit_msg>Raise JWTError into authentication handler<commit_after>from flaskiwsapp.users.controllers import get_user_by_id, get_user_by_email
from flaskiwsapp.snippets.customApi import DUMMY_ERROR_CODE
from flask_jwt import JWTError
from flask import jsonify
from flaskiwsapp.snippets.exceptions.userExceptions import UserDoesnotExistsException
from flask_api.status import HTTP_500_INTERNAL_SERVER_ERROR


def authenticate(email, password):
    try:
        user = get_user_by_email(email)
        if user and user.check_password(password) and user.is_active:
            return user
    except UserDoesnotExistsException as e:
        raise JWTError(error=str(type(e)), description=e.message)


def identity(payload):
    user_id = payload['identity']
    return get_user_by_id(user_id)


def error_handler(e):
    response_dict = {'data': {}}
    error = {}
    if isinstance(e, JWTError):
        error.update({'status': e.status_code})
        error.update({'title': e.error})
        error.update({'detail': 'Auth Failed: {}'.format(e.description)})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
    elif isinstance(e, Exception):
        error.update({'status': HTTP_500_INTERNAL_SERVER_ERROR})
        error.update({'title': str(type(e))})
        error.update({'detail': 'Auth Failed: {}'.format(e.args[0])})
        error.update({'code': DUMMY_ERROR_CODE})
        response_dict['data'] = error
        return jsonify(response_dict), e.status_code, e.headers
45c773c0d2c90a57949a758ab3ac5c15e2942528
resource_mgt.py
resource_mgt.py
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close()
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close() def words_per_line(flo): return [len(line.split()) for line in flo.readlines()]
Add a words per line function
Add a words per line function
Python
mit
kentoj/python-fundamentals
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close()Add a words per line function
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close() def words_per_line(flo): return [len(line.split()) for line in flo.readlines()]
<commit_before>"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close()<commit_msg>Add a words per line function<commit_after>
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close() def words_per_line(flo): return [len(line.split()) for line in flo.readlines()]
"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close()Add a words per line function"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close() def words_per_line(flo): return [len(line.split()) for line in flo.readlines()]
<commit_before>"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close()<commit_msg>Add a words per line function<commit_after>"""Class to show file manipulations""" import sys original_file = open('wasteland.txt', mode='rt', encoding='utf-8') file_to_write = open('wasteland-copy.txt', mode='wt', encoding='utf-8') file_to_write.write("What are the roots that clutch, ") file_to_write.write('what branches grow\n') file_to_write.close() file_reading = open('wasteland.txt', mode='rt', encoding='utf-8') for line in file_reading.readlines(): print(line) file_to_append = open('wasteland-copy.txt', mode='at', encoding='utf-8') file_to_append.writelines( ['Son of man,\n', 'You cannot say, or guess, ', 'for you know only,\n', 'A heap of broken images, ', 'where the sun beats\n']) file_to_append.close() def words_per_line(flo): return [len(line.split()) for line in flo.readlines()]
e094c15e97ea1d6c677ed52a26ae37409346d29f
notifications/tests/settings.py
notifications/tests/settings.py
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
Fix a RemovedInDjango110Warning in unittest
Fix a RemovedInDjango110Warning in unittest
Python
bsd-3-clause
lukeburden/django-notifications,django-notifications/django-notifications,Evidlo/django-notifications,Evidlo/django-notifications,django-notifications/django-notifications,lukeburden/django-notifications,lukeburden/django-notifications,zhang-z/django-notifications,LegoStormtroopr/django-notifications,zhang-z/django-notifications,iberben/django-notifications,django-notifications/django-notifications,Evidlo/django-notifications,zhang-z/django-notifications,LegoStormtroopr/django-notifications,iberben/django-notifications,iberben/django-notifications,LegoStormtroopr/django-notifications
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
Fix a RemovedInDjango110Warning in unittest
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
<commit_before>SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
<commit_msg>Fix a RemovedInDjango110Warning in unittest<commit_after>
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
Fix a RemovedInDjango110Warning in unittestSECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
<commit_before>SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
<commit_msg>Fix a RemovedInDjango110Warning in unittest<commit_after>SECRET_KEY = 'secret_key'

TESTING = True

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'notifications',
)

ROOT_URLCONF = 'notifications.tests.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]


# Need to skip migrations for now as migrations created with python2 break with python3
# See https://code.djangoproject.com/ticket/23455
class DisableMigrations(object):
    def __contains__(self, item):
        return True

    def __getitem__(self, item):
        return "notmigrations"

MIGRATION_MODULES = DisableMigrations()
f0bd7658b961daceaace56e4ada415c5c9410d54
UM/Operations/ScaleToBoundsOperation.py
UM/Operations/ScaleToBoundsOperation.py
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.

from UM.Operations.Operation import Operation

from UM.Math.Vector import Vector


## Operation subclass that will scale a node to fit within the bounds provided.
class ScaleToBoundsOperation(Operation):
    def __init__(self, node, bounds):
        super().__init__()
        self._node = node
        self._old_scale = node.getScale()

        bbox = self._node.getBoundingBox()
        largest_dimension = max(bbox.width, bbox.height, bbox.depth)

        scale_factor = 1.0
        if largest_dimension == bbox.width:
            scale_factor = self._old_scale.x * (bounds.width / bbox.width)
        elif largest_dimension == bbox.height:
            scale_factor = self._old_scale.y * (bounds.height / bbox.height)
        else:
            scale_factor = self._old_scale.z * (bounds.depth / bbox.depth)

        self._new_scale = Vector(scale_factor, scale_factor, scale_factor)

    def undo(self):
        self._node.setScale(self._old_scale)

    def redo(self):
        self._node.setPosition(Vector(0, 0, 0))
        self._node.setScale(self._new_scale)
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.

from UM.Operations.Operation import Operation

from UM.Math.Vector import Vector


## Operation subclass that will scale a node to fit within the bounds provided.
class ScaleToBoundsOperation(Operation):
    def __init__(self, node, bounds):
        super().__init__()
        self._node = node
        self._old_scale = node.getScale()

        bbox = self._node.getBoundingBox()
        largest_dimension = max(bbox.width, bbox.height, bbox.depth)

        scale_factor = 1.0
        if largest_dimension == bbox.depth:
            scale_factor = self._old_scale.z * (bounds.depth / bbox.depth)
        elif largest_dimension == bbox.width:
            scale_factor = self._old_scale.x * (bounds.width / bbox.width)
        elif largest_dimension == bbox.height:
            scale_factor = self._old_scale.y * (bounds.height / bbox.height)

        self._new_scale = Vector(scale_factor, scale_factor, scale_factor)

    def undo(self):
        self._node.setScale(self._old_scale)

    def redo(self):
        self._node.setPosition(Vector(0, 0, 0))
        self._node.setScale(self._new_scale)
Check depth before width since that is more likely to be the smaller dimension
Check depth before width since that is more likely to be the smaller dimension

Contributes to Asana issue 37107676459484
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
0e26a5764888a53859a5362afeaaccbd36ca62ac
gaesecure/decorators.py
gaesecure/decorators.py
from functools import wraps

from django.http import HttpResponseForbidden

from google.appengine.api.users import is_current_user_admin


def task_queue_only(view_func):
    """
    View decorator that only allows requests which originate from the App
    Engine task queue.
    """
    @wraps(view_func)
    def new_view(request, *args, **kwargs):
        if not request.META.get("X_APPENGINE_QUEUENAME"):
            return HttpResponseForbidden("Task queue requests only.")
        return view_func(request, *args, **kwargs)
    return new_view


def cron_only(view_func):
    """
    View decorator that only allows requests which originate from an App
    Engine cron.
    """
    @wraps(view_func)
    def new_view(request, *args, **kwargs):
        if not request.META.get("X_APPENGINE_CRON"):
            return HttpResponseForbidden("Cron requests only.")
        return view_func(request, *args, **kwargs)
    return new_view


def gae_admin_only(view_func):
    """
    View decorator that requires the user to be an administrator of the App
    Engine app.
    """
    @wraps(view_func)
    def new_view(*args, **kwargs):
        if not is_current_user_admin():
            return HttpResponseForbidden("Cron requests only.")
        return view_func(*args, **kwargs)
    return new_view
from functools import wraps

from django.http import HttpResponseForbidden

from google.appengine.api.users import is_current_user_admin


def task_queue_only(view_func):
    """
    View decorator that only allows requests which originate from the App
    Engine task queue.
    """
    @wraps(view_func)
    def new_view(request, *args, **kwargs):
        if not request.META.get("X_APPENGINE_QUEUENAME"):
            return HttpResponseForbidden("Task queue requests only.")
        return view_func(request, *args, **kwargs)
    return new_view


def cron_only(view_func):
    """
    View decorator that only allows requests which originate from an App
    Engine cron.
    """
    @wraps(view_func)
    def new_view(request, *args, **kwargs):
        if not request.META.get("X_APPENGINE_CRON"):
            return HttpResponseForbidden("Cron requests only.")
        return view_func(request, *args, **kwargs)
    return new_view


def gae_admin_only(view_func):
    """
    View decorator that requires the user to be an administrator of the App
    Engine app.
    """
    @wraps(view_func)
    def new_view(*args, **kwargs):
        if not is_current_user_admin():
            return HttpResponseForbidden("Admin users only.")
        return view_func(*args, **kwargs)
    return new_view
Fix copy-paste-o in exception message.
Fix copy-paste-o in exception message.
Python
mit
adamalton/django-gae-secure
3f31234454949e7dca3b91d9884568da57ab9fcd
conftest.py
conftest.py
import pytest
import json
import os.path
from fixture.application import Application

fixture = None
target = None


@pytest.fixture
def app(request):
    global fixture
    global target
    browser = request.config.getoption("--browser")
    if target is None:
        config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
        with open(config_file) as f:
            target = json.load(f)
    if fixture is None or not fixture.is_valid():
        fixture = Application(browser=browser, base_url=target['baseUrl'])
    fixture.session.ensure_login(username=target['username'], password=target['password'])
    return fixture


@pytest.fixture(scope="session", autouse=True)
def stop(request):
    def fin():
        fixture.session.ensure_logout()
        fixture.destroy()
    request.addfinalizer(fin)
    return fixture


def pytest_addoption(parser):
    parser.addoption("--browser", action="store", default="firefox")
    parser.addoption("--target", action="store", default="target.json")
import pytest
import json
import os.path
import importlib
import jsonpickle
from fixture.application import Application

fixture = None
target = None


@pytest.fixture
def app(request):
    global fixture
    global target
    browser = request.config.getoption("--browser")
    if target is None:
        config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption("--target"))
        with open(config_file) as f:
            target = json.load(f)
    if fixture is None or not fixture.is_valid():
        fixture = Application(browser=browser, base_url=target['baseUrl'])
    fixture.session.ensure_login(username=target['username'], password=target['password'])
    return fixture


@pytest.fixture(scope="session", autouse=True)
def stop(request):
    def fin():
        fixture.session.ensure_logout()
        fixture.destroy()
    request.addfinalizer(fin)
    return fixture


def pytest_addoption(parser):
    parser.addoption("--browser", action="store", default="firefox")
    parser.addoption("--target", action="store", default="target.json")


def pytest_generate_tests(metafunc):
    for fixture in metafunc.fixturenames:
        if fixture.startswith("data_"):
            testdata = load_from_module(fixture[5:])
            metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
        elif fixture.startswith("json_"):
            testdata = load_from_json(fixture[5:])
            metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])


def load_from_module(module):
    return importlib.import_module("data.%s" % module).testdata


def load_from_json(file):
    with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "data/%s.json" % file)) as f:
        return jsonpickle.decode(f.read())
Add test data loading from file and test data parametrization
Add test data loading from file and test data parametrization
Python
apache-2.0
ujilia/python_training2,ujilia/python_training2,ujilia/python_training2
f3986b1c86d4fc8b0fa52fd2e7221b8d6a5e3cab
system/protocols/capabilities.py
system/protocols/capabilities.py
# coding=utf-8
from enum import Enum, unique

__author__ = 'Sean'

__all__ = ["Capabilities"]


@unique
class Capabilities(Enum):
    """
    An enum containing constants to declare what a protocol is capable of

    You can use *protocol.get_capabilities()* or *protocol.has_capability(cap)*
    to get all of a protocol's capabilities or check whether it has a specific
    one respectively.

    The current capabilities we have are as follows:

    **MULTILINE_MESSAGE**: Messages can contain line-breaks

    **MULTIPLE_CHANNELS**: Protocol supports the concept of separate channels

    **MULTIPLE_CHANNELS_JOINED**: Protocol may be in more than one channel
    at once

    **VOICE**: Protocol support voice/audio communication

    **MESSAGE_UNJOINED_CHANNELS**: Protocol is able to send messages to
    channels that it hasn't joined

    **INDEPENDENT_VOICE_CHANNELS**: Voice and text channels are separate;
    can't send text to a voice channel and vice-versa
    """

    #: Messages can contain linebreaks
    MULTILINE_MESSAGE = 0

    #: Protocol uses channels
    #: (rather than a single "channel" for the whole protocol)
    MULTIPLE_CHANNELS = 1

    #: The protocol can be in more than one channel at a time
    MULTIPLE_CHANNELS_JOINED = 2

    #: Voice communication support
    VOICE = 3

    #: Able to send messages to channels the protocol isn't in
    MESSAGE_UNJOINED_CHANNELS = 4

    #: Voice and text channels are separate;
    #: can't send text to voice and vice versa
    INDEPENDENT_VOICE_CHANNELS = 5
# coding=utf-8
from enum import Enum, unique

__author__ = 'Sean'

__all__ = ["Capabilities"]


@unique
class Capabilities(Enum):
    """
    An enum containing constants to declare what a protocol is capable of

    You can use *protocol.get_capabilities()* or *protocol.has_capability(cap)*
    to get all of a protocol's capabilities or check whether it has a specific
    one respectively.

    The current capabilities we have are as follows:

    MULTILINE_MESSAGE
        Messages can contain line-breaks
    MULTIPLE_CHANNELS
        Protocol supports the concept of separate channels
    MULTIPLE_CHANNELS_JOINED
        Protocol may be in more than one channel at once
    VOICE
        Protocol supports voice/audio communication
    MESSAGE_UNJOINED_CHANNELS
        Protocol is able to send messages to channels that it hasn't joined
    INDEPENDENT_VOICE_CHANNELS
        Voice and text channels are separate; can't send text to a voice
        channel and vice-versa
    """

    #: Messages can contain linebreaks
    MULTILINE_MESSAGE = 0

    #: Protocol uses channels
    #: (rather than a single "channel" for the whole protocol)
    MULTIPLE_CHANNELS = 1

    #: The protocol can be in more than one channel at a time
    MULTIPLE_CHANNELS_JOINED = 2

    #: Voice communication support
    VOICE = 3

    #: Able to send messages to channels the protocol isn't in
    MESSAGE_UNJOINED_CHANNELS = 4

    #: Voice and text channels are separate;
    #: can't send text to voice and vice versa
    INDEPENDENT_VOICE_CHANNELS = 5
Fix doc formatting; wasn't expecting <pre>
[Capabilities] Fix doc formatting; wasn't expecting <pre>
Python
artistic-2.0
UltrosBot/Ultros,UltrosBot/Ultros
1a709d7240c0bbc1b1eab3c8803ed55b6657ec97
serializers/json_serializer.py
serializers/json_serializer.py
from StringIO import StringIO

from django.utils import simplejson
from django.core.serializers.json import DjangoJSONEncoder, DateTimeAwareJSONEncoder
from django.core.serializers.base import DeserializationError

from riv.serializers import base


class Serializer(base.Serializer):
    internal_use_only = False

    def get_loader(self):
        return Loader

    def end_serialization(self):
        super(Serializer, self).end_serialization()
        simplejson.dump(self.objects, self.stream, cls=DjangoJSONEncoder, **self.options)

    def getvalue(self):
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()


class Loader(base.Loader):
    def pre_loading(self):
        if isinstance(self.data, basestring):
            stream = StringIO(self.data)
        else:
            stream = self.data
        try:
            self.objects = simplejson.load(stream)
        except Exception, e:
            raise base.LoadingError(e)


def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        for obj in base.Deserializer(simplejson.load(stream), **options):
            yield obj
    except GeneratorExit:
        raise
    except Exception, e:
        # Map to deserializer error
        raise DeserializationError(e)
from StringIO import StringIO

from django.utils import simplejson
from django.core.serializers.json import DjangoJSONEncoder, DateTimeAwareJSONEncoder
from django.core.serializers.base import DeserializationError

from riv.serializers import base_serializer as base


class Serializer(base.Serializer):
    internal_use_only = False

    def get_loader(self):
        return Loader

    def end_serialization(self):
        super(Serializer, self).end_serialization()
        simplejson.dump(self.objects, self.stream, cls=DjangoJSONEncoder, **self.options)

    def getvalue(self):
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()


class Loader(base.Loader):
    def pre_loading(self):
        if isinstance(self.data, basestring):
            stream = StringIO(self.data)
        else:
            stream = self.data
        try:
            self.objects = simplejson.load(stream)
        except Exception, e:
            raise base.LoadingError(e)


def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    if isinstance(stream_or_string, basestring):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        for obj in base.Deserializer(simplejson.load(stream), **options):
            yield obj
    except GeneratorExit:
        raise
    except Exception, e:
        # Map to deserializer error
        raise DeserializationError(e)
Fix import in the json serializer
Fix import in the json serializer
Python
mit
danrex/django-riv,danrex/django-riv
915f0c820de5e6bbc49a36103ab7ae7065897c67
flask_tuktuk/commands.py
flask_tuktuk/commands.py
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from .helpers import pycharm TukTukCommand = Manager() @TukTukCommand.command def build_helpers(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(jsl.registry.iter_documents()) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename))
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from werkzeug.serving import run_with_reloader from .helpers import pycharm TukTukCommand = Manager() def _build_helpers(app): """:type app: flask.Flask""" with app.app_context(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(dict((cls.__name__, cls) for cls in jsl.registry.iter_documents())) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename)) @TukTukCommand.command def build_helpers(watch=False): app = current_app._get_current_object() if watch: # run_with_reloader seems to be internal API and is intended # for reloading webserver, but will do for now as a quick solution # TODO: rewrite run_with_reloader(lambda: _build_helpers(app)) else: _build_helpers(app)
Implement --watch key for build_helpers command
Implement --watch key for build_helpers command
Python
bsd-3-clause
aromanovich/flask-tuktuk,aromanovich/flask-tuktuk
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from .helpers import pycharm TukTukCommand = Manager() @TukTukCommand.command def build_helpers(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(jsl.registry.iter_documents()) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename))Implement --watch key for build_helpers command
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from werkzeug.serving import run_with_reloader from .helpers import pycharm TukTukCommand = Manager() def _build_helpers(app): """:type app: flask.Flask""" with app.app_context(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(dict((cls.__name__, cls) for cls in jsl.registry.iter_documents())) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename)) @TukTukCommand.command def build_helpers(watch=False): app = current_app._get_current_object() if watch: # run_with_reloader seems to be internal API and is intended # for reloading webserver, but will do for now as a quick solution # TODO: rewrite run_with_reloader(lambda: _build_helpers(app)) else: _build_helpers(app)
<commit_before># coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from .helpers import pycharm TukTukCommand = Manager() @TukTukCommand.command def build_helpers(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(jsl.registry.iter_documents()) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename))<commit_msg>Implement --watch key for build_helpers command<commit_after>
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from werkzeug.serving import run_with_reloader from .helpers import pycharm TukTukCommand = Manager() def _build_helpers(app): """:type app: flask.Flask""" with app.app_context(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(dict((cls.__name__, cls) for cls in jsl.registry.iter_documents())) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename)) @TukTukCommand.command def build_helpers(watch=False): app = current_app._get_current_object() if watch: # run_with_reloader seems to be internal API and is intended # for reloading webserver, but will do for now as a quick solution # TODO: rewrite run_with_reloader(lambda: _build_helpers(app)) else: _build_helpers(app)
# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from .helpers import pycharm TukTukCommand = Manager() @TukTukCommand.command def build_helpers(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(jsl.registry.iter_documents()) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename))Implement --watch key for build_helpers command# coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from werkzeug.serving import run_with_reloader from .helpers import pycharm TukTukCommand = Manager() def _build_helpers(app): """:type app: flask.Flask""" with app.app_context(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(dict((cls.__name__, cls) for cls in jsl.registry.iter_documents())) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename)) @TukTukCommand.command def build_helpers(watch=False): app = current_app._get_current_object() if watch: # run_with_reloader seems to be internal API and is intended # for reloading webserver, but will do for now as a quick solution # TODO: rewrite run_with_reloader(lambda: _build_helpers(app)) else: _build_helpers(app)
<commit_before># coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from .helpers import pycharm TukTukCommand = Manager() @TukTukCommand.command def build_helpers(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(jsl.registry.iter_documents()) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename))<commit_msg>Implement --watch key for build_helpers command<commit_after># coding: utf-8 from __future__ import unicode_literals, print_function import os import pkgutil import jsl from flask import current_app from flask.ext.script import Manager from werkzeug.serving import run_with_reloader from .helpers import pycharm TukTukCommand = Manager() def _build_helpers(app): """:type app: flask.Flask""" with app.app_context(): if 'TUKTUK_HELPERS_MODULE' not in current_app.config: raise Exception('Config variable TUKTUK_HELPERS_MODULE is not set.') helpers_module = current_app.config['TUKTUK_HELPERS_MODULE'] package = pkgutil.get_loader(helpers_module) if package is None: raise Exception('Module "{0}" does not exist. ' 'Please create it before running the command.'.format(helpers_module)) lines = pycharm.generate_module(dict((cls.__name__, cls) for cls in jsl.registry.iter_documents())) with open(package.filename, 'w') as f: f.write('\n'.join(lines)) filename = os.path.relpath(package.filename, os.getcwd()) print('Helpers module has been created. See {0} for the source code.'.format(filename)) @TukTukCommand.command def build_helpers(watch=False): app = current_app._get_current_object() if watch: # run_with_reloader seems to be internal API and is intended # for reloading webserver, but will do for now as a quick solution # TODO: rewrite run_with_reloader(lambda: _build_helpers(app)) else: _build_helpers(app)
7793f135808c1c133187f1d0a053b4f5549b58e8
lgogwebui.py
lgogwebui.py
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json')) as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json'), encoding='utf-8') as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
Correct UTF encoding of lgogdownloader json file.
Correct UTF encoding of lgogdownloader json file.
Python
bsd-2-clause
graag/lgogwebui,graag/lgogwebui,graag/lgogwebui
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json')) as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library')) Correct UTF encoding of lgogdownloader json file.
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json'), encoding='utf-8') as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
<commit_before>#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json')) as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library')) <commit_msg>Correct UTF encoding of lgogdownloader json file.<commit_after>
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json'), encoding='utf-8') as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json')) as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library')) Correct UTF encoding of lgogdownloader json file.#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json'), encoding='utf-8') as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
<commit_before>#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json')) as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library')) <commit_msg>Correct UTF encoding of lgogdownloader json file.<commit_after>#!/usr/bin/env python3 import json import os import config from models import Game, Status, session from flask import Flask, render_template, redirect, url_for app = Flask(__name__) @app.route('/') def library(): with open(os.path.join(config.lgog_cache, 'gamedetails.json'), encoding='utf-8') as f: data = json.load(f) if data is None: return "Unable to load the GOG games database." for game in data['games']: game['download'] = -1 state = session.query(Game.state).filter(Game.name == game['gamename']).one() if state == 'queue': game['download'] = 0 elif state == 'running': game['download'] = 0 elif os.path.isdir(os.path.join(config.lgog_library, game['gamename'])): game['download'] = 1 return render_template('library.html', data=data['games']) @app.route('/download/<game>') def download(game): db_game = session.query(Game).filter(Game.name == game).one() if db_game.state != 'running': db_game.state = 'queue' session.commit() return redirect(url_for('library'))
12cac5280ab5c74b3497055c4104f23e52cdd5f1
scripts/generate_posts.py
scripts/generate_posts.py
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + ":" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + "&#58;" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
Use HTML entity for colon
Use HTML entity for colon
Python
bsd-3-clause
amperser/proselint,jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + ":" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring) Use HTML entity for colon
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + "&#58;" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
<commit_before>import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + ":" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring) <commit_msg>Use HTML entity for colon<commit_after>
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + "&#58;" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + ":" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring) Use HTML entity for colonimport os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + "&#58;" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
<commit_before>import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + ":" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring) <commit_msg>Use HTML entity for colon<commit_after>import os import ast import datetime import re grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) checks_dir = os.path.join(grandparent, "proselint", "checks") listing = os.listdir(checks_dir) def is_check(fn): return fn[-3:] == ".py" and not fn == "__init__.py" for fn in listing: if is_check(fn): M = ast.parse(''.join(open(os.path.join(checks_dir, fn)))) docstring = ast.get_docstring(M) error_code = re.search("error_code: (.*)\n", docstring).group(1) head, sep, tail = docstring.partition("title: ") docstring = head + sep + " " + error_code + "&#58;" + tail[4:] post_filename = os.path.join( os.path.join(grandparent, "site", "_posts"), str(datetime.date.today()) + "-" + docstring[0:6] + ".md") # Chop off the first two lines for i in xrange(2): docstring = '\n'.join(docstring.split('\n')[1:]) # Create a new post in the blog. with open(post_filename, 'w') as f: f.write(docstring)
256bfb9e2d04fbd03ec2f4d3551e8a9d5ae11766
cardbox/deck_urls.py
cardbox/deck_urls.py
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.index, name='index'), )
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"), name='deck_list'), url(r'^new$', deck_views.DeckCreate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_new'), url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_edit'), url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view( template_name="cardbox/deck/deck_confirm_delete.html"), name='deck_delete'), url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view( template_name="cardbox/deck/deck_detail.html"), name='deck_detail') )
Add URL rules for deck CRUD
Add URL rules for deck CRUD
Python
mit
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.index, name='index'), ) Add URL rules for deck CRUD
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"), name='deck_list'), url(r'^new$', deck_views.DeckCreate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_new'), url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_edit'), url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view( template_name="cardbox/deck/deck_confirm_delete.html"), name='deck_delete'), url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view( template_name="cardbox/deck/deck_detail.html"), name='deck_detail') )
<commit_before>from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.index, name='index'), ) <commit_msg>Add URL rules for deck CRUD<commit_after>
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"), name='deck_list'), url(r'^new$', deck_views.DeckCreate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_new'), url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_edit'), url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view( template_name="cardbox/deck/deck_confirm_delete.html"), name='deck_delete'), url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view( template_name="cardbox/deck/deck_detail.html"), name='deck_detail') )
from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.index, name='index'), ) Add URL rules for deck CRUDfrom django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"), name='deck_list'), url(r'^new$', deck_views.DeckCreate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_new'), url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_edit'), url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view( template_name="cardbox/deck/deck_confirm_delete.html"), name='deck_delete'), url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view( template_name="cardbox/deck/deck_detail.html"), name='deck_detail') )
<commit_before>from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.index, name='index'), ) <commit_msg>Add URL rules for deck CRUD<commit_after>from django.conf.urls import patterns, include, url import deck_views urlpatterns = patterns('', url(r'^$', deck_views.DeckList.as_view(template_name="cardbox/deck/deck_list.html"), name='deck_list'), url(r'^new$', deck_views.DeckCreate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_new'), url(r'^edit/(?P<pk>\d+)$', deck_views.DeckUpdate.as_view( template_name="cardbox/deck/deck_form.html"), name='deck_edit'), url(r'^delete/(?P<pk>\d+)$', deck_views.DeckDelete.as_view( template_name="cardbox/deck/deck_confirm_delete.html"), name='deck_delete'), url(r'^detail/(?P<pk>\d+)/$', deck_views.DeckDetailView.as_view( template_name="cardbox/deck/deck_detail.html"), name='deck_detail') )
2f6bf949ae82b7ca3aa1785ca01e0cb17b3320c8
pinax/forums/tests/tests.py
pinax/forums/tests/tests.py
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/category/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/categories/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
Fix conflicting test. Test checks for categories/1/
Fix conflicting test. Test checks for categories/1/
Python
mit
pinax/pinax-forums
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/category/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1) Fix conflicting test. Test checks for categories/1/
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/categories/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
<commit_before>from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/category/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1) <commit_msg>Fix conflicting test. Test checks for categories/1/<commit_after>
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/categories/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/category/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1) Fix conflicting test. Test checks for categories/1/from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/categories/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
<commit_before>from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/category/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1) <commit_msg>Fix conflicting test. Test checks for categories/1/<commit_after>from django.test import TestCase from pinax.forums.models import Forum, ForumCategory class ForumCategoryTests(TestCase): def test_unicode_method(self): cat = ForumCategory(title="Software") self.assertEquals(str(cat), cat.title) def test_get_absolute_url(self): cat = ForumCategory.objects.create(title="Software") self.assertEquals(cat.get_absolute_url(), "/categories/1/") def test_forums_ordered_properly(self): cat = ForumCategory.objects.create(title="Software") Forum.objects.create(category=cat, title="Python", description="Python software") Forum.objects.create(category=cat, title="Swift", description="Swift software") Forum.objects.create(category=cat, title="Basic", description="Old software") forums = cat.forums self.assertEquals(forums[0].title, "Basic") self.assertEquals(forums[1].title, "Python") self.assertEquals(forums[2].title, "Swift") class ForumTests(TestCase): def test_forum_thread_count_is_zero(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.thread_count, 0) def test_inc_views(self): f = Forum.objects.create(title="Python", description="Python software") self.assertEquals(f.view_count, 0) f.inc_views() self.assertEquals(f.view_count, 1)
80b4aeb4c2c1c44ab19c91199d2532766eb556a9
client/exceptions.py
client/exceptions.py
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass class Timeout(OkException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass # TODO(albert): extend from a base class designed for student bugs. class Timeout(BaseException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
Make Timeout inherit from BaseException for now.
Make Timeout inherit from BaseException for now.
Python
apache-2.0
Cal-CS-61A-Staff/ok,jackzhao-mj/ok,jackzhao-mj/ok,jordonwii/ok,jackzhao-mj/ok,jordonwii/ok,jordonwii/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,jackzhao-mj/ok,Cal-CS-61A-Staff/ok,jordonwii/ok,Cal-CS-61A-Staff/ok
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass class Timeout(OkException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map)) Make Timeout inherit from BaseException for now.
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass # TODO(albert): extend from a base class designed for student bugs. class Timeout(BaseException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
<commit_before>"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass class Timeout(OkException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map)) <commit_msg>Make Timeout inherit from BaseException for now.<commit_after>
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass # TODO(albert): extend from a base class designed for student bugs. class Timeout(BaseException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass class Timeout(OkException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map)) Make Timeout inherit from BaseException for now."""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass # TODO(albert): extend from a base class designed for student bugs. class Timeout(BaseException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
<commit_before>"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass class Timeout(OkException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map)) <commit_msg>Make Timeout inherit from BaseException for now.<commit_after>"""Client exceptions.""" class OkException(BaseException): """Base exception for ok.py.""" pass # TODO(albert): extend from a base class designed for student bugs. class Timeout(BaseException): """Exception for timeouts.""" _message = 'Evaluation timed out!' def __init__(self, timeout): """Constructor. PARAMTERS: timeout -- int; number of seconds before timeout error occurred """ super().__init__(self) self.timeout = timeout class DeserializeError(OkException): """Exceptions related to deserialization.""" @classmethod def expect_dict(cls, json): return cls('Expected JSON dict, got {}'.format( type(json).__name__)) @classmethod def expect_list(cls, json): return cls('Expected JSON list, got {}'.format( type(json).__name__)) @classmethod def missing_fields(cls, fields): return cls('Missing fields: {}'.format( ', '.join(fields))) @classmethod def unexpected_field(cls, field): return cls('Unexpected field: {}'.format(field)) @classmethod def unexpected_value(cls, field, expect, actual): return cls( 'Field "{}" expected value {}, got {}'.format( field, expect, actual)) @classmethod def unexpected_type(cls, field, expect, actual): return cls( 'Field "{}" expected type {}, got {}'.format( field, expect, repr(actual))) @classmethod def unknown_type(cls, type_, case_map): return cls( 'TestCase type "{}" is unknown in case map {}'.format( type_, case_map))
98e333bafafc0161a256b2df895d269825910aab
mopidy/backends/dummy.py
mopidy/backends/dummy.py
from mopidy.backends import BaseBackend class DummyBackend(BaseBackend): def __init__(self, *args, **kwargs): super(DummyBackend, self).__init__(*args, **kwargs) def url_handlers(self): return [u'dummy:'] def _next(self): return True def _pause(self): return True def _play(self): return True def _play_id(self, songid): return True def _play_pos(self, songpos): return True def _previous(self): return True def _resume(self): return True
from mopidy.backends import (BaseBackend, BaseCurrentPlaylistController, BasePlaybackController, BaseLibraryController, BaseStoredPlaylistsController) class DummyBackend(BaseBackend): def __init__(self): self.current_playlist = DummyCurrentPlaylistController(backend=self) self.library = DummyLibraryController(backend=self) self.playback = DummyPlaybackController(backend=self) self.stored_playlists = DummyStoredPlaylistsController(backend=self) self.uri_handlers = [u'dummy:'] class DummyCurrentPlaylistController(BaseCurrentPlaylistController): pass class DummyLibraryController(BaseLibraryController): def search(self, type, query): return [] class DummyPlaybackController(BasePlaybackController): def _next(self): return True def _pause(self): return True def _play(self, track): return True def _previous(self): return True def _resume(self): return True class DummyStoredPlaylistsController(BaseStoredPlaylistsController): pass
Update DummyBackend to adhere to new backend API
Update DummyBackend to adhere to new backend API
Python
apache-2.0
mopidy/mopidy,ZenithDK/mopidy,abarisain/mopidy,jcass77/mopidy,hkariti/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,abarisain/mopidy,dbrgn/mopidy,dbrgn/mopidy,rawdlite/mopidy,diandiankan/mopidy,priestd09/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,woutervanwijk/mopidy,hkariti/mopidy,mokieyue/mopidy,liamw9534/mopidy,jodal/mopidy,jcass77/mopidy,mokieyue/mopidy,mopidy/mopidy,mopidy/mopidy,ali/mopidy,liamw9534/mopidy,priestd09/mopidy,kingosticks/mopidy,rawdlite/mopidy,bacontext/mopidy,pacificIT/mopidy,ZenithDK/mopidy,adamcik/mopidy,ali/mopidy,dbrgn/mopidy,tkem/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,quartz55/mopidy,bacontext/mopidy,diandiankan/mopidy,quartz55/mopidy,kingosticks/mopidy,hkariti/mopidy,glogiotatidis/mopidy,quartz55/mopidy,ZenithDK/mopidy,dbrgn/mopidy,swak/mopidy,bencevans/mopidy,bacontext/mopidy,ali/mopidy,tkem/mopidy,pacificIT/mopidy,jodal/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,bacontext/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,SuperStarPL/mopidy,ali/mopidy,quartz55/mopidy,pacificIT/mopidy,swak/mopidy,kingosticks/mopidy,hkariti/mopidy,diandiankan/mopidy,jmarsik/mopidy,bencevans/mopidy,swak/mopidy,bencevans/mopidy,vrs01/mopidy,tkem/mopidy,swak/mopidy,jcass77/mopidy,jmarsik/mopidy,adamcik/mopidy,vrs01/mopidy,glogiotatidis/mopidy,priestd09/mopidy,vrs01/mopidy,adamcik/mopidy,tkem/mopidy,jodal/mopidy,vrs01/mopidy,rawdlite/mopidy,bencevans/mopidy,jmarsik/mopidy,pacificIT/mopidy,diandiankan/mopidy
from mopidy.backends import BaseBackend class DummyBackend(BaseBackend): def __init__(self, *args, **kwargs): super(DummyBackend, self).__init__(*args, **kwargs) def url_handlers(self): return [u'dummy:'] def _next(self): return True def _pause(self): return True def _play(self): return True def _play_id(self, songid): return True def _play_pos(self, songpos): return True def _previous(self): return True def _resume(self): return True Update DummyBackend to adhere to new backend API
from mopidy.backends import (BaseBackend, BaseCurrentPlaylistController, BasePlaybackController, BaseLibraryController, BaseStoredPlaylistsController) class DummyBackend(BaseBackend): def __init__(self): self.current_playlist = DummyCurrentPlaylistController(backend=self) self.library = DummyLibraryController(backend=self) self.playback = DummyPlaybackController(backend=self) self.stored_playlists = DummyStoredPlaylistsController(backend=self) self.uri_handlers = [u'dummy:'] class DummyCurrentPlaylistController(BaseCurrentPlaylistController): pass class DummyLibraryController(BaseLibraryController): def search(self, type, query): return [] class DummyPlaybackController(BasePlaybackController): def _next(self): return True def _pause(self): return True def _play(self, track): return True def _previous(self): return True def _resume(self): return True class DummyStoredPlaylistsController(BaseStoredPlaylistsController): pass
<commit_before>from mopidy.backends import BaseBackend class DummyBackend(BaseBackend): def __init__(self, *args, **kwargs): super(DummyBackend, self).__init__(*args, **kwargs) def url_handlers(self): return [u'dummy:'] def _next(self): return True def _pause(self): return True def _play(self): return True def _play_id(self, songid): return True def _play_pos(self, songpos): return True def _previous(self): return True def _resume(self): return True <commit_msg>Update DummyBackend to adhere to new backend API<commit_after>
from mopidy.backends import (BaseBackend, BaseCurrentPlaylistController, BasePlaybackController, BaseLibraryController, BaseStoredPlaylistsController) class DummyBackend(BaseBackend): def __init__(self): self.current_playlist = DummyCurrentPlaylistController(backend=self) self.library = DummyLibraryController(backend=self) self.playback = DummyPlaybackController(backend=self) self.stored_playlists = DummyStoredPlaylistsController(backend=self) self.uri_handlers = [u'dummy:'] class DummyCurrentPlaylistController(BaseCurrentPlaylistController): pass class DummyLibraryController(BaseLibraryController): def search(self, type, query): return [] class DummyPlaybackController(BasePlaybackController): def _next(self): return True def _pause(self): return True def _play(self, track): return True def _previous(self): return True def _resume(self): return True class DummyStoredPlaylistsController(BaseStoredPlaylistsController): pass
from mopidy.backends import BaseBackend class DummyBackend(BaseBackend): def __init__(self, *args, **kwargs): super(DummyBackend, self).__init__(*args, **kwargs) def url_handlers(self): return [u'dummy:'] def _next(self): return True def _pause(self): return True def _play(self): return True def _play_id(self, songid): return True def _play_pos(self, songpos): return True def _previous(self): return True def _resume(self): return True Update DummyBackend to adhere to new backend APIfrom mopidy.backends import (BaseBackend, BaseCurrentPlaylistController, BasePlaybackController, BaseLibraryController, BaseStoredPlaylistsController) class DummyBackend(BaseBackend): def __init__(self): self.current_playlist = DummyCurrentPlaylistController(backend=self) self.library = DummyLibraryController(backend=self) self.playback = DummyPlaybackController(backend=self) self.stored_playlists = DummyStoredPlaylistsController(backend=self) self.uri_handlers = [u'dummy:'] class DummyCurrentPlaylistController(BaseCurrentPlaylistController): pass class DummyLibraryController(BaseLibraryController): def search(self, type, query): return [] class DummyPlaybackController(BasePlaybackController): def _next(self): return True def _pause(self): return True def _play(self, track): return True def _previous(self): return True def _resume(self): return True class DummyStoredPlaylistsController(BaseStoredPlaylistsController): pass
<commit_before>from mopidy.backends import BaseBackend class DummyBackend(BaseBackend): def __init__(self, *args, **kwargs): super(DummyBackend, self).__init__(*args, **kwargs) def url_handlers(self): return [u'dummy:'] def _next(self): return True def _pause(self): return True def _play(self): return True def _play_id(self, songid): return True def _play_pos(self, songpos): return True def _previous(self): return True def _resume(self): return True <commit_msg>Update DummyBackend to adhere to new backend API<commit_after>from mopidy.backends import (BaseBackend, BaseCurrentPlaylistController, BasePlaybackController, BaseLibraryController, BaseStoredPlaylistsController) class DummyBackend(BaseBackend): def __init__(self): self.current_playlist = DummyCurrentPlaylistController(backend=self) self.library = DummyLibraryController(backend=self) self.playback = DummyPlaybackController(backend=self) self.stored_playlists = DummyStoredPlaylistsController(backend=self) self.uri_handlers = [u'dummy:'] class DummyCurrentPlaylistController(BaseCurrentPlaylistController): pass class DummyLibraryController(BaseLibraryController): def search(self, type, query): return [] class DummyPlaybackController(BasePlaybackController): def _next(self): return True def _pause(self): return True def _play(self, track): return True def _previous(self): return True def _resume(self): return True class DummyStoredPlaylistsController(BaseStoredPlaylistsController): pass
914e419cd753f6815b2aa308b49d7ed357b523d6
muzicast/web/__init__.py
muzicast/web/__init__.py
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') app.secret_key = os.urandom(24)
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') #from muzicast.web.music import artist, album, track #app.register_module(artist, url_prefix='/artist') #app.register_module(album, url_prefix='/album') #app.register_module(track, url_prefix='/track') from muzicast.web.main import main app.register_module(main, url_prefix='/') app.secret_key = os.urandom(24)
Add handler modules as required
Add handler modules as required
Python
mit
nikhilm/muzicast,nikhilm/muzicast
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') app.secret_key = os.urandom(24) Add handler modules as required
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') #from muzicast.web.music import artist, album, track #app.register_module(artist, url_prefix='/artist') #app.register_module(album, url_prefix='/album') #app.register_module(track, url_prefix='/track') from muzicast.web.main import main app.register_module(main, url_prefix='/') app.secret_key = os.urandom(24)
<commit_before>import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') app.secret_key = os.urandom(24) <commit_msg>Add handler modules as required<commit_after>
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') #from muzicast.web.music import artist, album, track #app.register_module(artist, url_prefix='/artist') #app.register_module(album, url_prefix='/album') #app.register_module(track, url_prefix='/track') from muzicast.web.main import main app.register_module(main, url_prefix='/') app.secret_key = os.urandom(24)
import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') app.secret_key = os.urandom(24) Add handler modules as requiredimport os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') #from muzicast.web.music import artist, album, track #app.register_module(artist, url_prefix='/artist') #app.register_module(album, url_prefix='/album') #app.register_module(track, url_prefix='/track') from muzicast.web.main import main app.register_module(main, url_prefix='/') app.secret_key = os.urandom(24)
<commit_before>import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') app.secret_key = os.urandom(24) <commit_msg>Add handler modules as required<commit_after>import os from flask import Flask app = Flask(__name__) from muzicast.web.admin import admin app.register_module(admin, url_prefix='/admin') #from muzicast.web.music import artist, album, track #app.register_module(artist, url_prefix='/artist') #app.register_module(album, url_prefix='/album') #app.register_module(track, url_prefix='/track') from muzicast.web.main import main app.register_module(main, url_prefix='/') app.secret_key = os.urandom(24)
77500ad76ce321287bcd0b33ea5eac1766583bc7
storage/mongo_storage.py
storage/mongo_storage.py
from storage import Storage class MongoStorage(Storage): def __init__(self, config_dict): self.db = config_dict['database'] self.host = config_dict['host'] self.port = config_dict['port'] self.username = config_dict['username'] self.password = config_dict['password'] self.index = config_dict['index'] self.doc_type = config_dict['doc_type'] def store(self, report): return 'Report ID' def get_report(self, report_id): return {2: {'report': 'data'}} def delete(self, report_id): return {'Message': 'deleted'}
import json

from storage import Storage

TASKS = [
    {'task_id': 1,
     'task_status': 'Complete',
     'report_id': 1},
    {'task_id': 2,
     'task_status': 'Pending',
     'report_id': None},
]

REPORTS = [
    {'report_id': 1,
     'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
    {'report_id': 2,
     'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        report_id = REPORTS[-1]['report_id'] + 1
        REPORTS.append({'report_id': report_id, 'report': report})
        return report_id

    def get_report(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return {}
        return json.dumps(report[0])

    def delete(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return False
        REPORTS.remove(report[0])
        return True
Add mock functionality for mongo storage
Add mock functionality for mongo storage
Python
mpl-2.0
jmlong1027/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,mitre/multiscanner,MITRECND/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,mitre/multiscanner,awest1339/multiscanner
from storage import Storage class MongoStorage(Storage): def __init__(self, config_dict): self.db = config_dict['database'] self.host = config_dict['host'] self.port = config_dict['port'] self.username = config_dict['username'] self.password = config_dict['password'] self.index = config_dict['index'] self.doc_type = config_dict['doc_type'] def store(self, report): return 'Report ID' def get_report(self, report_id): return {2: {'report': 'data'}} def delete(self, report_id): return {'Message': 'deleted'} Add mock functionality for mongo storage
import json

from storage import Storage

TASKS = [
    {'task_id': 1,
     'task_status': 'Complete',
     'report_id': 1},
    {'task_id': 2,
     'task_status': 'Pending',
     'report_id': None},
]

REPORTS = [
    {'report_id': 1,
     'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
    {'report_id': 2,
     'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        report_id = REPORTS[-1]['report_id'] + 1
        REPORTS.append({'report_id': report_id, 'report': report})
        return report_id

    def get_report(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return {}
        return json.dumps(report[0])

    def delete(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return False
        REPORTS.remove(report[0])
        return True
<commit_before>from storage import Storage class MongoStorage(Storage): def __init__(self, config_dict): self.db = config_dict['database'] self.host = config_dict['host'] self.port = config_dict['port'] self.username = config_dict['username'] self.password = config_dict['password'] self.index = config_dict['index'] self.doc_type = config_dict['doc_type'] def store(self, report): return 'Report ID' def get_report(self, report_id): return {2: {'report': 'data'}} def delete(self, report_id): return {'Message': 'deleted'} <commit_msg>Add mock functionality for mongo storage<commit_after>
import json

from storage import Storage

TASKS = [
    {'task_id': 1,
     'task_status': 'Complete',
     'report_id': 1},
    {'task_id': 2,
     'task_status': 'Pending',
     'report_id': None},
]

REPORTS = [
    {'report_id': 1,
     'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
    {'report_id': 2,
     'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        report_id = REPORTS[-1]['report_id'] + 1
        REPORTS.append({'report_id': report_id, 'report': report})
        return report_id

    def get_report(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return {}
        return json.dumps(report[0])

    def delete(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return False
        REPORTS.remove(report[0])
        return True
from storage import Storage

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        return 'Report ID'

    def get_report(self, report_id):
        return {2: {'report': 'data'}}

    def delete(self, report_id):
        return {'Message': 'deleted'}
Add mock functionality for mongo storageimport json

from storage import Storage

TASKS = [
    {'task_id': 1,
     'task_status': 'Complete',
     'report_id': 1},
    {'task_id': 2,
     'task_status': 'Pending',
     'report_id': None},
]

REPORTS = [
    {'report_id': 1,
     'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
    {'report_id': 2,
     'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        report_id = REPORTS[-1]['report_id'] + 1
        REPORTS.append({'report_id': report_id, 'report': report})
        return report_id

    def get_report(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return {}
        return json.dumps(report[0])

    def delete(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return False
        REPORTS.remove(report[0])
        return True
<commit_before>from storage import Storage

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        return 'Report ID'

    def get_report(self, report_id):
        return {2: {'report': 'data'}}

    def delete(self, report_id):
        return {'Message': 'deleted'}
<commit_msg>Add mock functionality for mongo storage<commit_after>import json

from storage import Storage

TASKS = [
    {'task_id': 1,
     'task_status': 'Complete',
     'report_id': 1},
    {'task_id': 2,
     'task_status': 'Pending',
     'report_id': None},
]

REPORTS = [
    {'report_id': 1,
     'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
    {'report_id': 2,
     'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]

class MongoStorage(Storage):
    def __init__(self, config_dict):
        self.db = config_dict['database']
        self.host = config_dict['host']
        self.port = config_dict['port']
        self.username = config_dict['username']
        self.password = config_dict['password']
        self.index = config_dict['index']
        self.doc_type = config_dict['doc_type']

    def store(self, report):
        report_id = REPORTS[-1]['report_id'] + 1
        REPORTS.append({'report_id': report_id, 'report': report})
        return report_id

    def get_report(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return {}
        return json.dumps(report[0])

    def delete(self, report_id):
        report = [report for report in REPORTS if report['report_id'] == report_id]
        if len(report) == 0:
            return False
        REPORTS.remove(report[0])
        return True
ba80f05525bcaa7a9192b6e161158af184c6d234
treepace/trees.py
treepace/trees.py
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node.""" self._value = value self._children = children @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node. The optional child node list will be shallow-copied. """ self._value = value self._children = list(children) @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child node to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
Fix node list which was referenced, not copied
Fix node list which was referenced, not copied
Python
mit
sulir/treepace
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node.""" self._value = value self._children = children @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__ Fix node list which was referenced, not copied
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node. The optional child node list will be shallow-copied. """ self._value = value self._children = list(children) @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child node to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
<commit_before>"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node.""" self._value = value self._children = children @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__ <commit_msg>Fix node list which was referenced, not copied<commit_after>
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node. The optional child node list will be shallow-copied. """ self._value = value self._children = list(children) @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child node to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node.""" self._value = value self._children = children @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__ Fix node list which was referenced, not copied"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node. The optional child node list will be shallow-copied. """ self._value = value self._children = list(children) @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child node to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
<commit_before>"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node.""" self._value = value self._children = children @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__ <commit_msg>Fix node list which was referenced, not copied<commit_after>"""Tree interfaces and implementations.""" class Node: """A general tree node with references to children. There is no distinction between a whole tree and a node - the tree is just represented by a root node. """ def __init__(self, value, children=[]): """Construct a new tree node. The optional child node list will be shallow-copied. """ self._value = value self._children = list(children) @property def value(self): """Return this node's value.""" return self._value @value.setter def value(self, _value): """Set the value of this node -- usually a string, but any object is accepted.""" self._value = _value @property def children(self): """Return a tuple containing the child nodes.""" return tuple(self._children) def add_child(self, child): """Add a child node to the end.""" self._children.append(child) def delete_child(self, index): """Delete the child node at the specified *index*.""" del self._children[index] def __repr__(self): return str(self.value) + str(self._children) def __eq__(self, other): return self.__dict__ == other.__dict__
f52318c12d89c2a415519d2d3869879c3edda887
test/proper_noun_test.py
test/proper_noun_test.py
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Eilis and Jonathan at a workshop") assert result['score'] == 0.5
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Jonathan and Eilis are at a workshop") assert result['score'] == 1 / 3
Work around quirk of POS tagger
Work around quirk of POS tagger
Python
mit
ejh243/MunroeJargonProfiler,ejh243/MunroeJargonProfiler
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Eilis and Jonathan at a workshop") assert result['score'] == 0.5 Work around quirk of POS tagger
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Jonathan and Eilis are at a workshop") assert result['score'] == 1 / 3
<commit_before> from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Eilis and Jonathan at a workshop") assert result['score'] == 0.5 <commit_msg>Work around quirk of POS tagger<commit_after>
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Jonathan and Eilis are at a workshop") assert result['score'] == 1 / 3
from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Eilis and Jonathan at a workshop") assert result['score'] == 0.5 Work around quirk of POS tagger from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Jonathan and Eilis are at a workshop") assert result['score'] == 1 / 3
<commit_before> from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Eilis and Jonathan at a workshop") assert result['score'] == 0.5 <commit_msg>Work around quirk of POS tagger<commit_after> from jargonprofiler.util import tag_proper_nouns from jargonprofiler.munroe import munroe_score def test_proper_noun_in_sentence(): assert tag_proper_nouns("My name is Eilis.") == set(["Eilis"]) def test_proper_noun_begins_sentence(): assert tag_proper_nouns("Eilis is a girl") == set(["Eilis"]) def test_proper_noun_middle_sentence(): assert tag_proper_nouns("Today, Eilis is at CW17.") == set(["Eilis"]) def test_proper_noun_missing(): assert tag_proper_nouns("Today is cloudy at CW17.") == set() def test_two_proper_nouns(): assert tag_proper_nouns("Eilis Hannon is a girl.") == set(["Eilis", "Hannon"]) def test_munroe_with_proper_noun(): result = munroe_score("Eilis is a small girl") assert result["score"] == 1.0 def test_munroe_with_proper_noun_and_complex_words(): result = munroe_score("Jonathan and Eilis are at a workshop") assert result['score'] == 1 / 3
185906c1afc2bc38f0a7282e2b22e49262a73f9b
south/models.py
south/models.py
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) class Meta: unique_together = (('app_name', 'migration'),) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
Python
apache-2.0
smartfile/django-south,smartfile/django-south
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) class Meta: unique_together = (('app_name', 'migration'),) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration) Remove unique_together on the model; the key length was too long on wide-character MySQL installs.
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
<commit_before>from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) class Meta: unique_together = (('app_name', 'migration'),) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration) <commit_msg>Remove unique_together on the model; the key length was too long on wide-character MySQL installs.<commit_after>
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) class Meta: unique_together = (('app_name', 'migration'),) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration) Remove unique_together on the model; the key length was too long on wide-character MySQL installs.from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
<commit_before>from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) class Meta: unique_together = (('app_name', 'migration'),) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration) <commit_msg>Remove unique_together on the model; the key length was too long on wide-character MySQL installs.<commit_after>from django.db import models class MigrationHistory(models.Model): app_name = models.CharField(max_length=255) migration = models.CharField(max_length=255) applied = models.DateTimeField(blank=True) @classmethod def for_migration(cls, migration): try: return cls.objects.get(app_name=migration.app_label(), migration=migration.name()) except cls.DoesNotExist: return cls(app_name=migration.app_label(), migration=migration.name()) def get_migrations(self): from south.migration.base import Migrations return Migrations(self.app_name) def get_migration(self): return self.get_migrations().migration(self.migration)
3ab25e30e26ba7edf2f732ff0d4fa42b1446f6dc
txampext/test/test_axiomtypes.py
txampext/test/test_axiomtypes.py
try: from txampext import axiomtypes; axiomtypes from axiom import attributes except ImportError: axiomtypes = None from twisted.protocols import amp from twisted.trial import unittest class TypeForTests(unittest.TestCase): skip = axiomtypes is None def _test_typeFor(self, attr, expectedType, **kwargs): asAMP = axiomtypes.typeFor(attr, **kwargs) self.assertTrue(isinstance(asAMP, expectedType)) return asAMP def test_optional(self): asAMP = axiomtypes.typeFor(attributes.text(), optional=True) self.assertTrue(asAMP.optional) def test_text(self): self._test_typeFor(attributes.text(), amp.Unicode) def test_bytes(self): self._test_typeFor(attributes.bytes(), amp.String) def test_integer(self): self._test_typeFor(attributes.integer(), amp.Integer) def test_decimals(self): for precision in range(1, 11): attr = getattr(attributes, "point{}decimal".format(precision)) self._test_typeFor(attr(), amp.Decimal) self._test_typeFor(attributes.money(), amp.Decimal) def test_float(self): self._test_typeFor(attributes.ieee754_double(), amp.Float) def test_timestamp(self): self._test_typeFor(attributes.timestamp(), amp.DateTime)
try: from txampext import axiomtypes; axiomtypes from axiom import attributes except ImportError: # pragma: no cover axiomtypes = None from twisted.protocols import amp from twisted.trial import unittest class TypeForTests(unittest.TestCase): skip = axiomtypes is None def _test_typeFor(self, attr, expectedType, **kwargs): asAMP = axiomtypes.typeFor(attr, **kwargs) self.assertTrue(isinstance(asAMP, expectedType)) return asAMP def test_optional(self): asAMP = axiomtypes.typeFor(attributes.text(), optional=True) self.assertTrue(asAMP.optional) def test_text(self): self._test_typeFor(attributes.text(), amp.Unicode) def test_bytes(self): self._test_typeFor(attributes.bytes(), amp.String) def test_integer(self): self._test_typeFor(attributes.integer(), amp.Integer) def test_decimals(self): for precision in range(1, 11): attr = getattr(attributes, "point{}decimal".format(precision)) self._test_typeFor(attr(), amp.Decimal) self._test_typeFor(attributes.money(), amp.Decimal) def test_float(self): self._test_typeFor(attributes.ieee754_double(), amp.Float) def test_timestamp(self): self._test_typeFor(attributes.timestamp(), amp.DateTime)
Add 'no cover' pragma to hide bogus missing code coverage
Add 'no cover' pragma to hide bogus missing code coverage
Python
isc
lvh/txampext
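The change above relies on coverage.py's "# pragma: no cover" directive, which excludes a line (here, the fallback branch taken only when the optional dependency is absent) from coverage measurement. A minimal, runnable sketch of the same pattern; the module name is illustrative, not from the record:

try:
    import some_optional_backend  # illustrative optional dependency
except ImportError:  # pragma: no cover
    some_optional_backend = None

def backend_available():
    # True only when the optional dependency could be imported.
    return some_optional_backend is not None

if __name__ == "__main__":
    print(backend_available())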
4ae366e41c91191733e4715d00127f163e3a89b0
influxalchemy/client.py
influxalchemy/client.py
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(y for x in tags.get_points() for y in x.values())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(y for x in fields.get_points() for y in x.values())) return pts
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(t['tagKey'] for t in tags.get_points())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(f['fieldKey'] for f in fields.get_points())) return pts
Fix how to get tags and fields name
Fix how to get tags and fields name

The old implementation returns 'integer', 'float' in the result list.
Python
mit
amancevice/influxalchemy
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(y for x in tags.get_points() for y in x.values())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(y for x in fields.get_points() for y in x.values())) return pts Fix how to get tags and fields name The old implementation returns 'integer', 'float' in the result list.
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(t['tagKey'] for t in tags.get_points())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(f['fieldKey'] for f in fields.get_points())) return pts
<commit_before>""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(y for x in tags.get_points() for y in x.values())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(y for x in fields.get_points() for y in x.values())) return pts <commit_msg>Fix how to get tags and fields name The old implementation returns 'integer', 'float' in the result list.<commit_after>
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(t['tagKey'] for t in tags.get_points())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(f['fieldKey'] for f in fields.get_points())) return pts
""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(y for x in tags.get_points() for y in x.values())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(y for x in fields.get_points() for y in x.values())) return pts Fix how to get tags and fields name The old implementation returns 'integer', 'float' in the result list.""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(t['tagKey'] for t in tags.get_points())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(f['fieldKey'] for f in fields.get_points())) return pts
<commit_before>""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(y for x in tags.get_points() for y in x.values())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(y for x in fields.get_points() for y in x.values())) return pts <commit_msg>Fix how to get tags and fields name The old implementation returns 'integer', 'float' in the result list.<commit_after>""" InfluxAlchemy Client. """ from . import query from .measurement import Measurement class InfluxAlchemy(object): """ InfluxAlchemy database session. client (InfluxDBClient): Connection to InfluxDB database """ def __init__(self, client): self.bind = client # pylint: disable=protected-access assert self.bind._database is not None, \ "InfluxDB client database cannot be None" def query(self, *entities): """ Query InfluxDB entities. Entities are either Measurements or Tags/Fields. """ return query.InfluxDBQuery(entities, self) def measurements(self): """ Get measurements of an InfluxDB. """ results = self.bind.query("SHOW MEASUREMENTS;") for res in results.get_points(): yield Measurement.new(str(res["name"])) def tags(self, measurement): """ Get tags of a measurements in InfluxDB. """ tags = self.bind.query("SHOW tag keys FROM %s" % measurement) pts = sorted(set(t['tagKey'] for t in tags.get_points())) return pts def fields(self, measurement): """ Get fields of a measurements in InfluxDB. """ fields = self.bind.query("SHOW field keys FROM %s" % measurement) pts = sorted(set(f['fieldKey'] for f in fields.get_points())) return pts
1e8cc5743f32bb5f6e2e9bcbee0f78e3df357449
tests/test_fastpbkdf2.py
tests/test_fastpbkdf2.py
import pytest

from fastpbkdf2 import pbkdf2_hmac


def test_unsupported_algorithm():
    with pytest.raises(ValueError):
        pbkdf2_hmac("foo", b"password", b"salt", 1)
import binascii

import pytest

from fastpbkdf2 import pbkdf2_hmac


def test_unsupported_algorithm():
    with pytest.raises(ValueError):
        pbkdf2_hmac("foo", b"password", b"salt", 1)


@pytest.mark.parametrize("password,salt,iterations,length,derived_key", [
    (b"password", b"salt", 1, 20,
     b"0c60c80f961f0e71f3a9b524af6012062fe037a6"),
    (b"password", b"salt", 2, 20,
     b"ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"),
    (b"password", b"salt", 4096, 20,
     b"4b007901b765489abead49d926f721d065a429c1"),
    (b"password", b"salt", 16777216, 20,
     b"eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"),
    (b"passwordPASSWORDpassword", b"saltSALTsaltSALTsaltSALTsaltSALTsalt",
     4096, 25,
     b"3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"),
    (b"pass\0word", b"sa\0lt", 4096, 16,
     b"56fa6aa75548099dcc37d7f03425e0c3"),
])
def test_with_vectors(password, salt, iterations, length, derived_key):
    assert binascii.hexlify(
        pbkdf2_hmac("sha1", password, salt, iterations, length)
    ) == derived_key
Add test for RFC 6070 vectors.
Add test for RFC 6070 vectors.
Python
apache-2.0
Ayrx/python-fastpbkdf2,Ayrx/python-fastpbkdf2
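The parametrized vectors above are the RFC 6070 PBKDF2-HMAC-SHA1 test vectors. They can be cross-checked against the standard library, which ships the same primitive as hashlib.pbkdf2_hmac (Python 3.4+); a runnable spot check of one vector:

import binascii
import hashlib

# 4096-iteration vector from RFC 6070.
dk = hashlib.pbkdf2_hmac("sha1", b"password", b"salt", 4096, 20)
assert binascii.hexlify(dk) == b"4b007901b765489abead49d926f721d065a429c1"
print("RFC 6070 vector verified")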
fae0359357f21c4b2e7139bfd91b6b9499964a5b
class1/class1_ex6.py
class1/class1_ex6.py
#!/usr/bin/env python

import yaml
import json

my_list = [1, 2, 3, 4, 5, 6, 7, 8, "Firewall", "Router", "Switch",
           {"network": "10.11.12.1", "subnet": "255.255.255.0",
            "gateway": "10.11.12.1"},
           {"Model": "WS3560", "Vendor": "Cisco"}]

# Write YAML file
with open("my_yaml_file.yml", "w") as f:
    f.write(yaml.dump(my_list, default_flow_style=False))

# Write JSON file
with open("my_json_file.json", "w") as f:
    json.dump(my_list, f)
#!/usr/bin/env python

import yaml
import json

my_list = [1, 2, 3, 4, 5, 6, 7, 8, "Firewall", "Router", "Switch",
           {"network": "10.11.12.0", "subnet": "255.255.255.0",
            "gateway": "10.11.12.1"},
           {"Model": "WS3560", "Vendor": "Cisco"}]

# Write YAML file
with open("my_yaml_file.yml", "w") as f:
    f.write(yaml.dump(my_list, default_flow_style=False))

# Write JSON file
with open("my_json_file.json", "w") as f:
    json.dump(my_list, f)
Fix network dict value in list
Fix network dict value in list
Python
apache-2.0
ande0581/pynet
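The fix above corrects the 'network' entry from the gateway address (10.11.12.1) to the network address (10.11.12.0) of the /24. The distinction can be checked mechanically with the standard-library ipaddress module (Python 3.3+), whose strict parser rejects an address with host bits set:

import ipaddress

# The old value has host bits set for a 255.255.255.0 mask,
# so strict parsing rejects it.
try:
    ipaddress.ip_network("10.11.12.1/255.255.255.0")
except ValueError as exc:
    print("rejected:", exc)

net = ipaddress.ip_network("10.11.12.0/255.255.255.0")
print("network:", net.network_address)
print("gateway in subnet:", ipaddress.ip_address("10.11.12.1") in net)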
90ade823700da61824c113759f847bf08823c148
nova/objects/__init__.py
nova/objects/__init__.py
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def register_all():
    # NOTE(danms): You must make sure your object gets imported in this
    # function in order for it to be registered by services that may
    # need to receive it via RPC.
    __import__('nova.objects.dns_domain')
    __import__('nova.objects.instance')
    __import__('nova.objects.instance_info_cache')
    __import__('nova.objects.security_group')
    __import__('nova.objects.migration')
    __import__('nova.objects.quotas')
    __import__('nova.objects.virtual_interface')
    __import__('nova.objects.network')
    __import__('nova.objects.block_device')
    __import__('nova.objects.fixed_ip')
    __import__('nova.objects.floating_ip')
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


def register_all():
    # NOTE(danms): You must make sure your object gets imported in this
    # function in order for it to be registered by services that may
    # need to receive it via RPC.
    __import__('nova.objects.dns_domain')
    __import__('nova.objects.instance')
    __import__('nova.objects.instance_info_cache')
    __import__('nova.objects.security_group')
    __import__('nova.objects.migration')
    __import__('nova.objects.quotas')
    __import__('nova.objects.virtual_interface')
    __import__('nova.objects.network')
    __import__('nova.objects.block_device')
    __import__('nova.objects.fixed_ip')
    __import__('nova.objects.floating_ip')
    __import__('nova.objects.security_group_rule')
Add security_group_rule to objects registry
Add security_group_rule to objects registry

This adds the security_group_rule module to the objects registry,
which allows a service to make sure that all of its objects are
registered before any could be received over RPC.

We don't really have a test for any of these because of the nature
of how they're imported. Refactoring this later could provide some
incremental steps to making this more testable.

Change-Id: Ie96021f3cdeac6addab21c42a14cd8f136eb0b27
Closes-Bug: #1264816
Python
apache-2.0
citrix-openstack-build/oslo.versionedobjects,openstack/oslo.versionedobjects
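register_all() works because merely importing an object's module is enough to register its class: the registration runs as a side effect at class-definition time. A stripped-down sketch of that pattern (a generic metaclass registry, not Nova's actual machinery):

registry = {}

class RegisteredMeta(type):
    def __init__(cls, name, bases, namespace):
        super().__init__(name, bases, namespace)
        # Runs when the class body executes, i.e. when its module is imported.
        registry[name] = cls

class SecurityGroupRule(metaclass=RegisteredMeta):
    pass

print("SecurityGroupRule" in registry)  # True: defining/importing registered it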
d473ab017fb5f3e74705fbbd903ec4675d26730a
tests/test_util.py
tests/test_util.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import pytest

from libvcs.util import mkdir_p, which


def test_mkdir_p(tmpdir):
    path = tmpdir.join('file').ensure()

    with pytest.raises(Exception) as excinfo:
        mkdir_p(str(path))
    excinfo.match(r'Could not create directory %s' % path)

    # already exists is a noop
    mkdir_p(str(tmpdir))


def test_which_no_hg_found(monkeypatch):
    monkeypatch.setenv("PATH", "/")
    which('hg')
    which('hg', '/')
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import pytest

from libvcs.util import mkdir_p, which


def test_mkdir_p(tmpdir):
    path = tmpdir.join('file').ensure()

    with pytest.raises(Exception) as excinfo:
        mkdir_p(str(path))
    excinfo.match(r'Could not create directory %s' % path)

    # already exists is a noop
    mkdir_p(str(tmpdir))


def test_which_no_hg_found(monkeypatch):
    monkeypatch.setenv(str("PATH"), str("/"))
    which('hg')
    which('hg', '/')
Fix test warning on python 2 envs
Fix test warning on python 2 envs
Python
mit
tony/libvcs
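The str(...) wrapping above matters because the module enables unicode_literals: on Python 2 every literal becomes unicode, while os.environ (which monkeypatch.setenv writes to) expects native byte strings, so pytest emits a warning. A tiny sketch of the coercion, using a plain dict to stand in for os.environ:

from __future__ import unicode_literals

def setenv_native(environ, key, value):
    # Coerce to the native str type (bytes on Python 2, text on Python 3).
    environ[str(key)] = str(value)

env = {}
setenv_native(env, "PATH", "/")
print(env)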
3a586e2d584de1a70dd62ca0c9548fbc7a092164
calvin/runtime/south/calvinlib/textformatlib/Pystache.py
calvin/runtime/south/calvinlib/textformatlib/Pystache.py
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from calvin.runtime.south.calvinlib import base_calvinlib_object
import pystache


class Pystache(base_calvinlib_object.BaseCalvinlibObject):
    """
    Functions for manipulating strings.
    """

    init_schema = {
        "description": "setup mustache formated strings",
        "type": "object",
        "properties": {}
    }

    render = {
        "description": "convert dict structure into string",
        "type": "object",
        "properties": {
            "template": {"type": "string"},
            "dictionary": {"type": "dict"}
        }
    }

    def init(self):
        pass

    def render(self, template, *context, **kwargs):
        return pystache.render(template, *context, **kwargs)
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from calvin.runtime.south.calvinlib import base_calvinlib_object
import pystache


class Pystache(base_calvinlib_object.BaseCalvinlibObject):
    """
    Module for formatting strings using Mustache-style templates.
    """

    render_schema = {
        "description": "Return template string rendered using given dictionary",
        "type": "object",
        "properties": {
            "template": {"type": "string"},
            "dictionary": {"type": "dict"}
        }
    }

    def init(self):
        pass

    def render(self, template, dictionary):
        return pystache.render(template, **dictionary)
Fix erroneous schema naming & others
calvinlib: Fix erroneous schema naming & others
Python
apache-2.0
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
0197553740ff6b542515cb53ce816d629e7b5648
tspapi/__init__.py
tspapi/__init__.py
from __future__ import absolute_import

from tspapi.api_call import _ApiCall
from tspapi.api import API
from tspapi.measurement import Measurement
from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
from __future__ import absolute_import

from tspapi.api_exception import ConnectionError
from tspapi.api_exception import HTTPResponseError
from tspapi.measurement import Measurement
from tspapi.source import Source
from tspapi.event import RawEvent
from tspapi.event import Event
from tspapi.metric import Metric
from tspapi.api_call import _ApiCall
from tspapi.api import API
Rearrange imports for proper dependencies
Rearrange imports for proper dependencies
Python
apache-2.0
jdgwartney/pulse-api-python
1854b4b667cd07ca4f4426bd40d94c3e42ed19e6
chatterbot/__init__.py
chatterbot/__init__.py
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
Increment package version to 1.0.0a3
Increment package version to 1.0.0a3
Python
bsd-3-clause
vkosuri/ChatterBot,gunthercox/ChatterBot
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', ) Increment package version to 1.0.0a3
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', ) <commit_msg>Increment package version to 1.0.0a3<commit_after>
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', ) Increment package version to 1.0.0a3""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
<commit_before>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a2' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', ) <commit_msg>Increment package version to 1.0.0a3<commit_after>""" ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot __version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = 'gunthercx@gmail.com' __url__ = 'https://github.com/gunthercox/ChatterBot' __all__ = ( 'ChatBot', )
f7b43e5bc14f7fc03d085d695dbad4d910a21453
wordpop.py
wordpop.py
#!/usr/bin/env python

import os
import redis
import json
import random

redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)

key = redis_client.randomkey()
json_str = redis_client.get(key)
word_data = json.loads(json_str)

lexical_entries_length = len(word_data['results'][0]['lexicalEntries'])
random_entry = random.randint(0, lexical_entries_length - 1)

lexical_category = word_data['results'][0]['lexicalEntries'][random_entry]['lexicalCategory']
definition = word_data['results'][0]['lexicalEntries'][random_entry]['entries'][0]['senses'][0]['definitions'][0]

#Following try-except fetches an example sentence for the word
try:
    example_sentence = word_data['results'][0]['lexicalEntries'][random_entry]['entries'][0]['senses'][0]['examples'][0]['text']
except LookupError: #You will arrive here if there is no "text" key
    example_sentence = "None"

word = word_data['results'][0]['word']

cmd = '/usr/bin/notify-send "' + word + ' | ' + lexical_category + '" "' + definition + '\n<b>Ex:</b> ' + example_sentence + '"'
print cmd
os.system(cmd)
#!/usr/bin/env python

import os
import redis
import json
import random

redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)

#Oxford API usually returns a list of synonyms
#Here we are only returning the first one
def fetch_synonym(synonyms):
    if synonyms != "none":
        return synonyms.split(',')[0]
    else:
        return None

key = redis_client.randomkey()
json_str = redis_client.get(key)
word_data = json.loads(json_str)

lexical_entries_length = len(word_data['results'][0]['lexicalEntries'])
random_entry = random.randint(0, lexical_entries_length - 1)

lexical_category = word_data['results'][0]['lexicalEntries'][random_entry]['lexicalCategory']
definition = word_data['results'][0]['lexicalEntries'][random_entry]['entries'][0]['senses'][0]['definitions'][0]

#Following try-except fetches an example sentence for the word
try:
    example_sentence = word_data['results'][0]['lexicalEntries'][random_entry]['entries'][0]['senses'][0]['examples'][0]['text']
except LookupError: #You will arrive here if there is no "text" key
    example_sentence = "None"

word = word_data['results'][0]['word']

synonym = fetch_synonym(word_data['synonyms'])

#This is done for the sake of formatting
if synonym:
    synonym = ' | ' + synonym
else:
    #Fall back to an empty string so the concatenation below cannot fail on None
    synonym = ''

cmd = '/usr/bin/notify-send "' + word + ' | ' + lexical_category + synonym + '" "' + definition + '\nEx: ' + example_sentence + '"'
print cmd
os.system(cmd)
Append synonym of the word in notification's title
Append synonym of the word in notification's title
Python
mit
sbmthakur/wordpop
076295a997e0dd5ac693b60459b63437ee369411
comics/crawler/crawlers/whiteninja.py
comics/crawler/crawlers/whiteninja.py
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
from comics.crawler.utils.lxmlparser import LxmlParser

class ComicMeta(BaseComicMeta):
    name = 'White Ninja'
    language = 'en'
    url = 'http://www.whiteninjacomics.com/'
    start_date = '2002-01-01'
    history_capable_days = 15
    time_zone = -6
    rights = 'Scott Bevan & Kent Earle'

class ComicCrawler(BaseComicCrawler):
    def _get_url(self):
        self.parse_feed('http://www.whiteninjacomics.com/rss/z-latest.xml')

        for entry in self.feed.entries:
            if (entry.updated_parsed and
                    self.timestamp_to_date(entry.updated_parsed) == self.pub_date):
                self.title = entry.title.split(' - ')[0]
                self.web_url = entry.link
                break

        if self.web_url is None:
            return

        page = LxmlParser(self.web_url)
        page.remove('img[src^="http://www.whiteninjacomics.com/images/comics/t-"]')
        self.url = page.src('img[src^="http://www.whiteninjacomics.com/images/comics/"]')
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
from comics.crawler.utils.lxmlparser import LxmlParser

class ComicMeta(BaseComicMeta):
    name = 'White Ninja'
    language = 'en'
    url = 'http://www.whiteninjacomics.com/'
    start_date = '2002-01-01'
    history_capable_days = 60
    time_zone = -6
    rights = 'Scott Bevan & Kent Earle'

class ComicCrawler(BaseComicCrawler):
    def _get_url(self):
        self.parse_feed('http://www.whiteninjacomics.com/rss/z-latest.xml')

        for entry in self.feed.entries:
            if (entry.updated_parsed and
                    self.timestamp_to_date(entry.updated_parsed) == self.pub_date):
                self.title = entry.title.split(' - ')[0]
                self.web_url = entry.link
                break

        if self.web_url is None:
            return

        page = LxmlParser(self.web_url)
        page.remove('img[src^="http://www.whiteninjacomics.com/images/comics/t-"]')
        self.url = page.src('img[src^="http://www.whiteninjacomics.com/images/comics/"]')
Increase history capable days from 15 to 60 for White Ninja
Increase history capable days from 15 to 60 for White Ninja
Python
agpl-3.0
klette/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics
bbedbab40ba6fc6b958eb7bdc5b50cef58ad0240
bijgeschaafd/settings_test.py
bijgeschaafd/settings_test.py
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}
import os
from settings_base import *

APP_ROOT = os.path.dirname(os.path.abspath(__file__))

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.dirname(APP_ROOT)+'/newsdiffs.db',
    }
}

SECRET_KEY='BULLSHIT'
Add some SECRET_KEY to the test settings in order to make Travis run.
Add some SECRET_KEY to the test settings in order to make Travis run.
Python
mit
flupzor/newsdiffs,flupzor/bijgeschaafd,flupzor/newsdiffs,flupzor/bijgeschaafd,flupzor/bijgeschaafd,flupzor/bijgeschaafd,flupzor/newsdiffs,flupzor/newsdiffs
5dec564479d0fb735cfb972a9790f0d5e5197fa1
bluebottle/homepage/models.py
bluebottle/homepage/models.py
from bluebottle.quotes.models import Quote
from bluebottle.slides.models import Slide
from bluebottle.statistics.models import Statistic
from bluebottle.utils.model_dispatcher import get_project_model

PROJECT_MODEL = get_project_model()


class HomePage(object):
    """
    Instead of serving all the objects separately
    we combine Slide, Quote and Stats into a dummy object
    """
    def get(self, language):
        self.id = language
        self.quotes = Quote.objects.published().filter(language=language)
        self.slides = Slide.objects.published().filter(language=language)
        self.statistics = Statistic.objects.filter(active=True).all()
        projects = PROJECT_MODEL.objects.filter(is_campaign=True, status__viewable=True)
        if language == 'en':
            projects = projects.filter(language__code=language)
        projects = projects.order_by('?')
        if len(projects) > 3:
            self.projects = projects[0:3]
        elif len(projects) > 0:
            self.projects = projects[0:len(projects)]
        else:
            self.projects = None
        return self
from bluebottle.quotes.models import Quote
from bluebottle.slides.models import Slide
from bluebottle.statistics.models import Statistic
from bluebottle.utils.model_dispatcher import get_project_model

PROJECT_MODEL = get_project_model()


class HomePage(object):
    """
    Instead of serving all the objects separately
    we combine Slide, Quote and Stats into a dummy object
    """
    def get(self, language):
        self.id = language
        self.quotes = Quote.objects.published().filter(language=language)
        self.slides = Slide.objects.published().filter(language=language)
        self.statistics = Statistic.objects.filter(active=True).all()
        projects = PROJECT_MODEL.objects.filter(is_campaign=True, status__viewable=True)
        if language == 'en':
            projects = projects.filter(language__code=language)
        projects = projects.order_by('?')
        if len(projects) > 4:
            self.projects = projects[0:4]
        elif len(projects) > 0:
            self.projects = projects[0:len(projects)]
        else:
            self.projects = None
        return self
Return 4 projects for homepage
Return 4 projects for homepage
Python
bsd-3-clause
jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle
c88969f2e4a1459e6a66da3f75f8ea41ae9cea9c
rplugin/python3/denite/source/denite_gtags/tags_base.py
rplugin/python3/denite/source/denite_gtags/tags_base.py
import re
from abc import abstractmethod

from denite_gtags import GtagsBase  # pylint: disable=locally-disabled, wrong-import-position


class TagsBase(GtagsBase):
    TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)')

    def __init__(self, vim):
        super().__init__(vim)

    @abstractmethod
    def get_search_flags(self):
        return []

    def get_search_word(self, context):
        if len(context['args']) > 0:
            return context['args'][0]
        return context['input']

    def gather_candidates(self, context):
        word = self.get_search_word(context)
        tags = self.exec_global(self.get_search_flags() + [word], context)
        candidates = self._convert_to_candidates(tags)
        return candidates

    @classmethod
    def _convert_to_candidates(cls, tags):
        candidates = []
        for tag in tags:
            path, line, text = cls._parse_tag(tag)
            col = text.find(text) - 1
            candidates.append({
                'word': tag,
                'action__path': path,
                'action__line': line,
                'action__text': text,
                'action__col': col
            })
        return candidates

    @classmethod
    def _parse_tag(cls, tag):
        match = cls.TAG_PATTERN.match(tag)
        return match.groups()
import re
from abc import abstractmethod

from denite_gtags import GtagsBase  # pylint: disable=locally-disabled, wrong-import-position


class TagsBase(GtagsBase):
    TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)')

    @abstractmethod
    def get_search_flags(self):
        return []

    def get_search_word(self, context):
        args_count = len(context['args'])
        if args_count > 0:
            return context['args'][0]
        return context['input']

    def gather_candidates(self, context):
        word = self.get_search_word(context)
        tags = self.exec_global(self.get_search_flags() + ['--', word], context)
        candidates = self._convert_to_candidates(tags)
        return candidates

    @classmethod
    def _convert_to_candidates(cls, tags):
        candidates = []
        for tag in tags:
            path, line, text = cls._parse_tag(tag)
            col = text.find(text) - 1
            candidates.append({
                'word': tag,
                'action__path': path,
                'action__line': line,
                'action__text': text,
                'action__col': col
            })
        return candidates

    @classmethod
    def _parse_tag(cls, tag):
        match = cls.TAG_PATTERN.match(tag)
        return match.groups()
Fix error on words beginning with '-'
Fix error on words beginning with '-'
Python
mit
ozelentok/denite-gtags
import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') def __init__(self, vim): super().__init__(vim) @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): if len(context['args']) > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + [word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups() Fix error on words beginning with '-'
import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): args_count = len(context['args']) if args_count > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + ['--', word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups()
<commit_before>import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') def __init__(self, vim): super().__init__(vim) @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): if len(context['args']) > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + [word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups() <commit_msg>Fix error on words beginning with '-'<commit_after>
import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): args_count = len(context['args']) if args_count > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + ['--', word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups()
import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') def __init__(self, vim): super().__init__(vim) @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): if len(context['args']) > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + [word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups() Fix error on words beginning with '-'import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): args_count = len(context['args']) if args_count > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + ['--', word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups()
<commit_before>import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') def __init__(self, vim): super().__init__(vim) @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): if len(context['args']) > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + [word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups() <commit_msg>Fix error on words beginning with '-'<commit_after>import re from abc import abstractmethod from denite_gtags import GtagsBase # pylint: disable=locally-disabled, wrong-import-position class TagsBase(GtagsBase): TAG_PATTERN = re.compile('([^\t]+)\t(\\d+)\t(.*)') @abstractmethod def get_search_flags(self): return [] def get_search_word(self, context): args_count = len(context['args']) if args_count > 0: return context['args'][0] return context['input'] def gather_candidates(self, context): word = self.get_search_word(context) tags = self.exec_global(self.get_search_flags() + ['--', word], context) candidates = self._convert_to_candidates(tags) return candidates @classmethod def _convert_to_candidates(cls, tags): candidates = [] for tag in tags: path, line, text = cls._parse_tag(tag) col = text.find(text) -1 candidates.append({ 'word': tag, 'action__path': path, 'action__line': line, 'action__text': text, 'action__col': col }) return candidates @classmethod def _parse_tag(cls, tag): match = cls.TAG_PATTERN.match(tag) return match.groups()
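The fix in the record above relies on the conventional `--` end-of-options marker: placing it before the search word makes GNU GLOBAL treat a word such as `-foo` as a positional argument instead of a flag. A minimal sketch of that convention, assuming a plain `subprocess` call rather than the plugin's own `exec_global` helper:

```python
import subprocess

def run_global(flags, word):
    # "--" ends option parsing, so a search word beginning with "-"
    # is passed through as a positional argument, not as a flag.
    cmd = ["global"] + flags + ["--", word]
    result = subprocess.run(cmd, capture_output=True, text=True)
    return result.stdout.splitlines()

# Requires GNU GLOBAL on PATH; without the "--" separator, a word
# like "-foo" would be parsed as options and the call would fail.
# tags = run_global(["-x"], "-foo")
```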
71b07eddba9400eb9729da00fba5d88b0fcefe57
cw/cw305/util/plot.py
cw/cw305/util/plot.py
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file from bokeh.models import tools def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) plot.add_tools(tools.CrosshairTool()) plot.add_tools(tools.HoverTool()) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
Add crosshair and hover tools to bokeh html output
Add crosshair and hover tools to bokeh html output Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com>
Python
apache-2.0
lowRISC/ot-sca,lowRISC/ot-sca
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot) Add crosshair and hover tools to bokeh html output Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com>
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file from bokeh.models import tools def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) plot.add_tools(tools.CrosshairTool()) plot.add_tools(tools.HoverTool()) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
<commit_before># Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot) <commit_msg>Add crosshair and hover tools to bokeh html output Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com><commit_after>
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file from bokeh.models import tools def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) plot.add_tools(tools.CrosshairTool()) plot.add_tools(tools.HoverTool()) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot) Add crosshair and hover tools to bokeh html output Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com># Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file from bokeh.models import tools def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) plot.add_tools(tools.CrosshairTool()) plot.add_tools(tools.HoverTool()) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
<commit_before># Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot) <commit_msg>Add crosshair and hover tools to bokeh html output Signed-off-by: Alphan Ulusoy <23b245cc5a07aacf75a9db847b24c67dee1707bf@google.com><commit_after># Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import itertools from bokeh.plotting import figure, show from bokeh.palettes import Dark2_5 as palette from bokeh.io import output_file from bokeh.models import tools def save_plot_to_file(traces, num_traces, outfile): """Save plot figure to file.""" colors = itertools.cycle(palette) xrange = range(len(traces[0])) plot = figure(plot_width=800) plot.add_tools(tools.CrosshairTool()) plot.add_tools(tools.HoverTool()) for i in range(min(len(traces), num_traces)): plot.line(xrange, traces[i], line_color=next(colors)) output_file(outfile) show(plot)
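The change above attaches CrosshairTool and HoverTool before the traces are drawn; any Bokeh tool can be registered the same way. A small standalone sketch under the same (older) Bokeh API the record uses, where `figure` still accepts `plot_width` (renamed `width` in later Bokeh releases):

```python
from bokeh.plotting import figure, show
from bokeh.models import CrosshairTool, HoverTool

plot = figure(plot_width=800)
# Tools apply figure-wide; "$x" and "$y" in tooltips are cursor coordinates.
plot.add_tools(CrosshairTool())
plot.add_tools(HoverTool(tooltips=[("x", "$x"), ("y", "$y")]))
plot.line([0, 1, 2, 3], [1, 3, 2, 4])
show(plot)
```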
c81d075dd11591e6b68f3f1444d80200db24bfad
install.py
install.py
#!/usr/bin/env python import os import stat import config TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Please copy config.py.dist to config.py and retry.' exit(1) source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable'
#!/usr/bin/env python import os import stat import config import shutil TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Created personal config.py for your customizations' shutil.copyfile('config.py.dist', 'config.py') source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' exit(1)
Create a config.py rather than having people do it manually.
Create a config.py rather than having people do it manually.
Python
mit
banga/powerline-shell,intfrr/powerline-shell,ceholden/powerline-shell,banga/powerline-shell,bitIO/powerline-shell,paulhybryant/powerline-shell,b-ryan/powerline-shell,LeonardoGentile/powerline-shell,yc2prime/powerline-shell,dtrip/powerline-shell,fellipecastro/powerline-shell,tswsl1989/powerline-shell,paulhybryant/powerline-shell,saghul/shline,blieque/powerline-shell,MartinWetterwald/powerline-shell,iKrishneel/powerline-shell,torbjornvatn/powerline-shell,mcdope/powerline-shell,handsomecheung/powerline-shell,junix/powerline-shell,Menci/powerline-shell,wrgoldstein/powerline-shell,mart-e/powerline-shell,paol/powerline-shell,JulianVolodia/powerline-shell,rbanffy/powerline-shell,eran-stratoscale/powerline-shell,milkbikis/powerline-shell,b-ryan/powerline-shell,strycore/powerline-shell,guykr-stratoscale/powerline-shell,nicholascapo/powerline-shell
#!/usr/bin/env python import os import stat import config TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Please copy config.py.dist to config.py and retry.' exit(1) source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' Create a config.py rather than having people do it manually.
#!/usr/bin/env python import os import stat import config import shutil TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Created personal config.py for your customizations' shutil.copyfile('config.py.dist', 'config.py') source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' exit(1)
<commit_before>#!/usr/bin/env python import os import stat import config TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Please copy config.py.dist to config.py and retry.' exit(1) source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' <commit_msg>Create a config.py rather than having people do it manually.<commit_after>
#!/usr/bin/env python import os import stat import config import shutil TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Created personal config.py for your customizations' shutil.copyfile('config.py.dist', 'config.py') source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' exit(1)
#!/usr/bin/env python import os import stat import config TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Please copy config.py.dist to config.py and retry.' exit(1) source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' Create a config.py rather than having people do it manually.#!/usr/bin/env python import os import stat import config import shutil TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Created personal config.py for your customizations' shutil.copyfile('config.py.dist', 'config.py') source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' exit(1)
<commit_before>#!/usr/bin/env python import os import stat import config TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Please copy config.py.dist to config.py and retry.' exit(1) source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' <commit_msg>Create a config.py rather than having people do it manually.<commit_after>#!/usr/bin/env python import os import stat import config import shutil TEMPLATE_FILE = 'powerline-shell.py.template' OUTPUT_FILE = 'powerline-shell.py' SEGMENTS_DIR = 'segments' THEMES_DIR = 'themes' def load_source(srcfile): try: return ''.join(open(srcfile).readlines()) + '\n\n' except IOError: print 'Could not open', srcfile return '' if __name__ == "__main__": try: with open('config.py'): pass except IOError: print 'Created personal config.py for your customizations' shutil.copyfile('config.py.dist', 'config.py') source = load_source(TEMPLATE_FILE) source += load_source(os.path.join(THEMES_DIR, config.THEME + '.py')) for segment in config.SEGMENTS: source += load_source(os.path.join(SEGMENTS_DIR, segment + '.py')) source += 'sys.stdout.write(powerline.draw())\n' try: open(OUTPUT_FILE, 'w').write(source) st = os.stat(OUTPUT_FILE) os.chmod(OUTPUT_FILE, st.st_mode | stat.S_IEXEC) print OUTPUT_FILE, 'saved successfully' except IOError: print 'ERROR: Could not write to powerline-shell.py. Make sure it is writable' exit(1)
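The record above replaces a hard failure with first-run bootstrapping: if no personal config exists, the distributed template is copied into place. The pattern in isolation, keeping the file names from the record; the function name is chosen here for illustration:

```python
import os
import shutil

def ensure_config(path='config.py', template='config.py.dist'):
    # Only seed the config if the user has not already created
    # (and possibly customized) their own copy.
    if not os.path.exists(path):
        shutil.copyfile(template, path)
        print('Created personal %s for your customizations' % path)

ensure_config()
```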
6b17ac63c7bac79eea36733be32147f3867beda8
cis/plugins/validation/mozilliansorg_publisher_plugin.py
cis/plugins/validation/mozilliansorg_publisher_plugin.py
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True # XXX TODO Validate groups only come from Mozillians.org return True
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups and whitelists which fields mozillians.org has authority over. :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True ## Validate only whitelisted fields for this publisher are in use whitelist = [ 'timezone', 'displayName', 'firstName', 'lastName', 'preferredLanguage', 'primaryEmail', 'emails', 'phoneNumbers', 'uris', 'nicknames', 'SSHFingerprints', 'PGPFingerprints', 'picture', 'shirtSize', 'groups' ] # Check for any non-whitelisted attribute modification # Note that no extra attribute may be added at this stage as we already performed schema validation for attr in user: if attr not in whitelist: if profile_json.get(attr) != user.get(attr): logger.exception('permission denied: publisher {} attempted to modify user attributes it has no authority over'.format(publisher)) return False ## Validate namespaced groups only come from Mozillians.org # This is the whitelisted group prefix for this publisher: # group sub structure looks like: user.groups = [ 'group_from_ldap1', 'mozilliansorg_mytestgroup', ...] prefix = 'mozilliansorg_' old_groups = user.get('groups') new_groups = profile_json.get('groups') # Check if we have any non-mozilliansorg group that has been *removed* for g in old_groups: if not g.startswith(prefix): if g not in new_groups: logger.exception('permission denied: publisher {} attempted to remove groups it has no authority over'.format(publisher)) return False # Check if we have any non-mozilliansorg group that has been *added* for g in new_groups: if not g.startswith(prefix): if g not in old_groups: logger.exception('permission denied: publisher {} attempted to add groups it has no authority over'.format(publisher)) return False return True
Add mozillians.org validation checks (tests passed)
Add mozillians.org validation checks (tests passed)
Python
mpl-2.0
mozilla-iam/cis,mozilla-iam/cis
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True # XXX TODO Validate groups only come from Mozillians.org return True Add mozillians.org validation checks (tests passed)
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups and whitelists which fields mozillians.org has authority over. :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True ## Validate only whitelisted fields for this publisher are in use whitelist = [ 'timezone', 'displayName', 'firstName', 'lastName', 'preferredLanguage', 'primaryEmail', 'emails', 'phoneNumbers', 'uris', 'nicknames', 'SSHFingerprints', 'PGPFingerprints', 'picture', 'shirtSize', 'groups' ] # Check for any non-whitelisted attribute modification # Note that no extra attribute may be added at this stage as we already performed schema validation for attr in user: if attr not in whitelist: if profile_json.get(attr) != user.get(attr): logger.exception('permission denied: publisher {} attempted to modify user attributes it has no authority over'.format(publisher)) return False ## Validate namespaced groups only come from Mozillians.org # This is the whitelisted group prefix for this publisher: # group sub structure looks like: user.groups = [ 'group_from_ldap1', 'mozilliansorg_mytestgroup', ...] prefix = 'mozilliansorg_' old_groups = user.get('groups') new_groups = profile_json.get('groups') # Check if we have any non-mozilliansorg group that has been *removed* for g in old_groups: if not g.startswith(prefix): if g not in new_groups: logger.exception('permission denied: publisher {} attempted to remove groups it has no authority over'.format(publisher)) return False # Check if we have any non-mozilliansorg group that has been *added* for g in new_groups: if not g.startswith(prefix): if g not in old_groups: logger.exception('permission denied: publisher {} attempted to add groups it has no authority over'.format(publisher)) return False return True
<commit_before>import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True # XXX TODO Validate groups only come from Mozillians.org return True <commit_msg>Add mozillians.org validation checks (tests passed)<commit_after>
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups and whitelists which fields mozillians.org has authority over. :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True ## Validate only whitelisted fields for this publisher are in use whitelist = [ 'timezone', 'displayName', 'firstName', 'lastName', 'preferredLanguage', 'primaryEmail', 'emails', 'phoneNumbers', 'uris', 'nicknames', 'SSHFingerprints', 'PGPFingerprints', 'picture', 'shirtSize', 'groups' ] # Check for any non-whitelisted attribute modification # Note that no extra attribute may be added at this stage as we already performed schema validation for attr in user: if attr not in whitelist: if profile_json.get(attr) != user.get(attr): logger.exception('permission denied: publisher {} attempted to modify user attributes it has no authority over'.format(publisher)) return False ## Validate namespaced groups only come from Mozillians.org # This is the whitelisted group prefix for this publisher: # group sub structure looks like: user.groups = [ 'group_from_ldap1', 'mozilliansorg_mytestgroup', ...] prefix = 'mozilliansorg_' old_groups = user.get('groups') new_groups = profile_json.get('groups') # Check if we have any non-mozilliansorg group that has been *removed* for g in old_groups: if not g.startswith(prefix): if g not in new_groups: logger.exception('permission denied: publisher {} attempted to remove groups it has no authority over'.format(publisher)) return False # Check if we have any non-mozilliansorg group that has been *added* for g in new_groups: if not g.startswith(prefix): if g not in old_groups: logger.exception('permission denied: publisher {} attempted to add groups it has no authority over'.format(publisher)) return False return True
import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True # XXX TODO Validate groups only come from Mozillians.org return True Add mozillians.org validation checks (tests passed)import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups and whitelists which fields mozillians.org has authority over. :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True ## Validate only whitelisted fields for this publisher are in use whitelist = [ 'timezone', 'displayName', 'firstName', 'lastName', 'preferredLanguage', 'primaryEmail', 'emails', 'phoneNumbers', 'uris', 'nicknames', 'SSHFingerprints', 'PGPFingerprints', 'picture', 'shirtSize', 'groups' ] # Check for any non-whitelisted attribute modification # Note that no extra attribute may be added at this stage as we already performed schema validation for attr in user: if attr not in whitelist: if profile_json.get(attr) != user.get(attr): logger.exception('permission denied: publisher {} attempted to modify user attributes it has no authority over'.format(publisher)) return False ## Validate namespaced groups only come from Mozillians.org # This is the whitelisted group prefix for this publisher: # group sub structure looks like: user.groups = [ 'group_from_ldap1', 'mozilliansorg_mytestgroup', ...] prefix = 'mozilliansorg_' old_groups = user.get('groups') new_groups = profile_json.get('groups') # Check if we have any non-mozilliansorg group that has been *removed* for g in old_groups: if not g.startswith(prefix): if g not in new_groups: logger.exception('permission denied: publisher {} attempted to remove groups it has no authority over'.format(publisher)) return False # Check if we have any non-mozilliansorg group that has been *added* for g in new_groups: if not g.startswith(prefix): if g not in old_groups: logger.exception('permission denied: publisher {} attempted to add groups it has no authority over'.format(publisher)) return False return True
<commit_before>import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True # XXX TODO Validate groups only come from Mozillians.org return True <commit_msg>Add mozillians.org validation checks (tests passed)<commit_after>import logging logger = logging.getLogger(__name__) def run(publisher, user, profile_json): """ Check if a user mozillian's profile properly namespaces mozillians groups and whitelists which fields mozillians.org has authority over. :publisher: The CIS publisher :user: The user from the CIS vault :profile_json: The user profile passed by the publisher """ # This plugin only cares about mozillians.org publisher if publisher != 'mozillians.org': return True ## Validate only whitelisted fields for this publisher are in use whitelist = [ 'timezone', 'displayName', 'firstName', 'lastName', 'preferredLanguage', 'primaryEmail', 'emails', 'phoneNumbers', 'uris', 'nicknames', 'SSHFingerprints', 'PGPFingerprints', 'picture', 'shirtSize', 'groups' ] # Check for any non-whitelisted attribute modification # Note that no extra attribute may be added at this stage as we already performed schema validation for attr in user: if attr not in whitelist: if profile_json.get(attr) != user.get(attr): logger.exception('permission denied: publisher {} attempted to modify user attributes it has no authority over'.format(publisher)) return False ## Validate namespaced groups only come from Mozillians.org # This is the whitelisted group prefix for this publisher: # group sub structure looks like: user.groups = [ 'group_from_ldap1', 'mozilliansorg_mytestgroup', ...] prefix = 'mozilliansorg_' old_groups = user.get('groups') new_groups = profile_json.get('groups') # Check if we have any non-mozilliansorg group that has been *removed* for g in old_groups: if not g.startswith(prefix): if g not in new_groups: logger.exception('permission denied: publisher {} attempted to remove groups it has no authority over'.format(publisher)) return False # Check if we have any non-mozilliansorg group that has been *added* for g in new_groups: if not g.startswith(prefix): if g not in old_groups: logger.exception('permission denied: publisher {} attempted to add groups it has no authority over'.format(publisher)) return False return True
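The group check in the record is symmetric: a publisher may only add or remove groups that carry its own prefix. The same rule can be stated as a set comparison restricted to foreign groups; a sketch, with the function name chosen here for illustration:

```python
def foreign_groups_changed(old_groups, new_groups, prefix='mozilliansorg_'):
    # Compare only groups outside the publisher's namespace; any
    # difference there is an unauthorized addition or removal.
    old_foreign = {g for g in old_groups if not g.startswith(prefix)}
    new_foreign = {g for g in new_groups if not g.startswith(prefix)}
    return old_foreign != new_foreign

assert foreign_groups_changed(['ldap_team', 'mozilliansorg_x'], ['mozilliansorg_x'])
assert not foreign_groups_changed(['ldap_team'], ['ldap_team', 'mozilliansorg_y'])
```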
bf3218f9d125c9b4072fc99d108fe936578c79e0
dbversions/versions/44dccb7b8b82_update_username_to_l.py
dbversions/versions/44dccb7b8b82_update_username_to_l.py
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) stmt = users.update().\ values(username=sa.func.lower(users.c.username)) connection.execute(stmt) def downgrade(): pass
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) bmarks = sa.Table('bmarks', meta, autoload=True) try: op.drop_constraint("bmarks_username_fkey", "bmarks") print 'dropped constraint' except sa.exc.OperationalError: # If it's not supported then pass pass sel = sa.select([users]) for user in connection.execute(sel): print 'updating for user: ' + user['username'] lowered = sa.func.lower(user['username']) stmt = users.update().\ where(users.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) stmt = bmarks.update().\ where(bmarks.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) print 'done user: ' + user['username'] try: op.create_foreign_key( "bmarks_username_fkey", "bmarks", "users", ["username"], ["username"]) print 'added constraint' except sa.exc.OperationalError: # If it's not supported then pass pass def downgrade(): pass
Fix the lowercase migration to work in other dbs
Fix the lowercase migration to work in other dbs
Python
agpl-3.0
wangjun/Bookie,bookieio/Bookie,charany1/Bookie,adamlincoln/Bookie,charany1/Bookie,wangjun/Bookie,GreenLunar/Bookie,teodesson/Bookie,GreenLunar/Bookie,bookieio/Bookie,adamlincoln/Bookie,bookieio/Bookie,wangjun/Bookie,skmezanul/Bookie,teodesson/Bookie,teodesson/Bookie,bookieio/Bookie,wangjun/Bookie,teodesson/Bookie,GreenLunar/Bookie,skmezanul/Bookie,skmezanul/Bookie,charany1/Bookie,adamlincoln/Bookie,skmezanul/Bookie,adamlincoln/Bookie,GreenLunar/Bookie
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) stmt = users.update().\ values(username=sa.func.lower(users.c.username)) connection.execute(stmt) def downgrade(): pass Fix the lowercase migration to work in other dbs
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) bmarks = sa.Table('bmarks', meta, autoload=True) try: op.drop_constraint("bmarks_username_fkey", "bmarks") print 'dropped constraint' except sa.exc.OperationalError: # If it's not supported then pass pass sel = sa.select([users]) for user in connection.execute(sel): print 'updating for user: ' + user['username'] lowered = sa.func.lower(user['username']) stmt = users.update().\ where(users.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) stmt = bmarks.update().\ where(bmarks.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) print 'done user: ' + user['username'] try: op.create_foreign_key( "bmarks_username_fkey", "bmarks", "users", ["username"], ["username"]) print 'added constraint' except sa.exc.OperationalError: # If it's not supported then pass pass def downgrade(): pass
<commit_before>"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) stmt = users.update().\ values(username=sa.func.lower(users.c.username)) connection.execute(stmt) def downgrade(): pass <commit_msg>Fix the lowercase migration to work in other dbs<commit_after>
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) bmarks = sa.Table('bmarks', meta, autoload=True) try: op.drop_constraint("bmarks_username_fkey", "bmarks") print 'dropped constraint' except sa.exc.OperationalError: # If it's not supported then pass pass sel = sa.select([users]) for user in connection.execute(sel): print 'updating for user: ' + user['username'] lowered = sa.func.lower(user['username']) stmt = users.update().\ where(users.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) stmt = bmarks.update().\ where(bmarks.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) print 'done user: ' + user['username'] try: op.create_foreign_key( "bmarks_username_fkey", "bmarks", "users", ["username"], ["username"]) print 'added constraint' except sa.exc.OperationalError: # If it's not supported then pass pass def downgrade(): pass
"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) stmt = users.update().\ values(username=sa.func.lower(users.c.username)) connection.execute(stmt) def downgrade(): pass Fix the lowercase migration to work in other dbs"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) bmarks = sa.Table('bmarks', meta, autoload=True) try: op.drop_constraint("bmarks_username_fkey", "bmarks") print 'dropped constraint' except sa.exc.OperationalError: # If it's not supported then pass pass sel = sa.select([users]) for user in connection.execute(sel): print 'updating for user: ' + user['username'] lowered = sa.func.lower(user['username']) stmt = users.update().\ where(users.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) stmt = bmarks.update().\ where(bmarks.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) print 'done user: ' + user['username'] try: op.create_foreign_key( "bmarks_username_fkey", "bmarks", "users", ["username"], ["username"]) print 'added constraint' except sa.exc.OperationalError: # If it's not supported then pass pass def downgrade(): pass
<commit_before>"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) stmt = users.update().\ values(username=sa.func.lower(users.c.username)) connection.execute(stmt) def downgrade(): pass <commit_msg>Fix the lowercase migration to work in other dbs<commit_after>"""update username to lowercase Revision ID: 44dccb7b8b82 Revises: 9f274a38d84 Create Date: 2014-02-27 00:55:59.913206 """ # revision identifiers, used by Alembic. revision = '44dccb7b8b82' down_revision = '9f274a38d84' from alembic import op import sqlalchemy as sa def upgrade(): connection = op.get_bind() current_context = op.get_context() meta = current_context.opts['target_metadata'] users = sa.Table('users', meta, autoload=True) bmarks = sa.Table('bmarks', meta, autoload=True) try: op.drop_constraint("bmarks_username_fkey", "bmarks") print 'dropped constraint' except sa.exc.OperationalError: # If it's not supported then pass pass sel = sa.select([users]) for user in connection.execute(sel): print 'updating for user: ' + user['username'] lowered = sa.func.lower(user['username']) stmt = users.update().\ where(users.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) stmt = bmarks.update().\ where(bmarks.c.username == user['username']).\ values(username=lowered) connection.execute(stmt) print 'done user: ' + user['username'] try: op.create_foreign_key( "bmarks_username_fkey", "bmarks", "users", ["username"], ["username"]) print 'added constraint' except sa.exc.OperationalError: # If it's not supported then pass pass def downgrade(): pass
33b640b1d9ea11cd28eb82631d8e34c9e2e31c10
molecule/testinfra/common/test_basic_configuration.py
molecule/testinfra/common/test_basic_configuration.py
import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host): if host.system_info.codename == "xenial": c = host.run("timedatectl status") assert "Network time on: yes" in c.stdout assert "NTP synchronized: yes" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
from testinfra.host import Host import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host: Host) -> None: if host.system_info.codename == "xenial": assert host.package("ntp").is_installed assert host.package("ntpdate").is_installed # TODO: The staging setup timing is too erratic for the # following check. If we do want to reinstate it before # dropping Xenial support, it should be done in a loop to give # ntpd time to sync after the machines are created. # c = host.run("ntpq -c rv") # assert "leap_none" in c.stdout # assert "sync_ntp" in c.stdout # assert "refid" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
Change test_system_time check on Xenial
Change test_system_time check on Xenial
Python
agpl-3.0
conorsch/securedrop,conorsch/securedrop,conorsch/securedrop,conorsch/securedrop,conorsch/securedrop
import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host): if host.system_info.codename == "xenial": c = host.run("timedatectl status") assert "Network time on: yes" in c.stdout assert "NTP synchronized: yes" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout Change test_system_time check on Xenial
from testinfra.host import Host import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host: Host) -> None: if host.system_info.codename == "xenial": assert host.package("ntp").is_installed assert host.package("ntpdate").is_installed # TODO: The staging setup timing is too erratic for the # following check. If we do want to reinstate it before # dropping Xenial support, it should be done in a loop to give # ntpd time to sync after the machines are created. # c = host.run("ntpq -c rv") # assert "leap_none" in c.stdout # assert "sync_ntp" in c.stdout # assert "refid" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
<commit_before>import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host): if host.system_info.codename == "xenial": c = host.run("timedatectl status") assert "Network time on: yes" in c.stdout assert "NTP synchronized: yes" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout <commit_msg>Change test_system_time check on Xenial<commit_after>
from testinfra.host import Host import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host: Host) -> None: if host.system_info.codename == "xenial": assert host.package("ntp").is_installed assert host.package("ntpdate").is_installed # TODO: The staging setup timing is too erratic for the # following check. If we do want to reinstate it before # dropping Xenial support, it should be done in a loop to give # ntpd time to sync after the machines are created. # c = host.run("ntpq -c rv") # assert "leap_none" in c.stdout # assert "sync_ntp" in c.stdout # assert "refid" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host): if host.system_info.codename == "xenial": c = host.run("timedatectl status") assert "Network time on: yes" in c.stdout assert "NTP synchronized: yes" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout Change test_system_time check on Xenialfrom testinfra.host import Host import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host: Host) -> None: if host.system_info.codename == "xenial": assert host.package("ntp").is_installed assert host.package("ntpdate").is_installed # TODO: The staging setup timing is too erratic for the # following check. If we do want to reinstate it before # dropping Xenial support, it should be done in a loop to give # ntpd time to sync after the machines are created. # c = host.run("ntpq -c rv") # assert "leap_none" in c.stdout # assert "sync_ntp" in c.stdout # assert "refid" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
<commit_before>import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host): if host.system_info.codename == "xenial": c = host.run("timedatectl status") assert "Network time on: yes" in c.stdout assert "NTP synchronized: yes" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout <commit_msg>Change test_system_time check on Xenial<commit_after>from testinfra.host import Host import testutils test_vars = testutils.securedrop_test_vars testinfra_hosts = [test_vars.app_hostname, test_vars.monitor_hostname] def test_system_time(host: Host) -> None: if host.system_info.codename == "xenial": assert host.package("ntp").is_installed assert host.package("ntpdate").is_installed # TODO: The staging setup timing is too erratic for the # following check. If we do want to reinstate it before # dropping Xenial support, it should be done in a loop to give # ntpd time to sync after the machines are created. # c = host.run("ntpq -c rv") # assert "leap_none" in c.stdout # assert "sync_ntp" in c.stdout # assert "refid" in c.stdout else: assert not host.package("ntp").is_installed assert not host.package("ntpdate").is_installed s = host.service("systemd-timesyncd") assert s.is_running assert s.is_enabled assert not s.is_masked # File will be touched on every successful synchronization, # see 'man systemd-timesyncd'` assert host.file("/run/systemd/timesync/synchronized").exists c = host.run("timedatectl show") assert "NTP=yes" in c.stdout assert "NTPSynchronized=yes" in c.stdout
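Branching assertions on `host.system_info.codename` is the general Testinfra pattern for release-specific expectations, as the record shows. A trimmed sketch of the same shape:

```python
def test_time_sync_daemon(host):
    # Xenial still ships ntpd; later releases rely on systemd-timesyncd.
    if host.system_info.codename == "xenial":
        assert host.package("ntp").is_installed
    else:
        assert not host.package("ntp").is_installed
        service = host.service("systemd-timesyncd")
        assert service.is_running
        assert service.is_enabled
```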
1e55d82bee4360608d5053ac05c8a62f57d72bf7
erpnext_ebay/custom_methods/website_slideshow_methods.py
erpnext_ebay/custom_methods/website_slideshow_methods.py
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > 12: frappe.throw('Number of eBay images must be 12 or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe MAX_EBAY_IMAGES = 12 def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > MAX_EBAY_IMAGES: frappe.throw( f'Number of eBay images must be {MAX_EBAY_IMAGES} or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
Use parameter for maximum number of eBay images
fix: Use parameter for maximum number of eBay images
Python
mit
bglazier/erpnext_ebay,bglazier/erpnext_ebay
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > 12: frappe.throw('Number of eBay images must be 12 or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!') fix: Use parameter for maximum number of eBay images
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe MAX_EBAY_IMAGES = 12 def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > MAX_EBAY_IMAGES: frappe.throw( f'Number of eBay images must be {MAX_EBAY_IMAGES} or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
<commit_before># -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > 12: frappe.throw('Number of eBay images must be 12 or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!') <commit_msg>fix: Use parameter for maximum number of eBay images<commit_after>
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe MAX_EBAY_IMAGES = 12 def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > MAX_EBAY_IMAGES: frappe.throw( f'Number of eBay images must be {MAX_EBAY_IMAGES} or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > 12: frappe.throw('Number of eBay images must be 12 or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!') fix: Use parameter for maximum number of eBay images# -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe MAX_EBAY_IMAGES = 12 def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > MAX_EBAY_IMAGES: frappe.throw( f'Number of eBay images must be {MAX_EBAY_IMAGES} or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
<commit_before># -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > 12: frappe.throw('Number of eBay images must be 12 or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!') <commit_msg>fix: Use parameter for maximum number of eBay images<commit_after># -*- coding: utf-8 -*- """Custom methods for Item doctype""" import frappe MAX_EBAY_IMAGES = 12 def website_slideshow_validate(doc, _method): """On Website Slideshow validate docevent.""" if doc.number_of_ebay_images > MAX_EBAY_IMAGES: frappe.throw( f'Number of eBay images must be {MAX_EBAY_IMAGES} or fewer!') if doc.number_of_ebay_images < 1: frappe.throw('Number of eBay images must be 1 or greater!')
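For the `validate` handler in this record to fire, Frappe apps map it to the doctype in the app's hooks.py via the standard `doc_events` mechanism. A sketch of that registration; the dotted path is inferred from the file path in this record:

# hooks.py (sketch)
doc_events = {
    "Website Slideshow": {
        # called on every validate of a Website Slideshow document
        "validate": "erpnext_ebay.custom_methods.website_slideshow_methods.website_slideshow_validate",
    },
}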
cb456cbdb8850fda4b438d7f60b3aa00365f7f9b
__init__.py
__init__.py
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ import numpy as np from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit']
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ from numpy.random import seed as np_seed from random import seed as python_seed from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] def random_seed(i): """ Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds. Parameters ---------- i: int integer used as seed for random number generators """ # python's random module python_seed(i) # numpy random module np_seed(i)
Make global utulity for setting random seed
Make global utulity for setting random seed
Python
bsd-3-clause
mwojcikowski/opendrugdiscovery
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ import numpy as np from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] Make global utulity for setting random seed
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ from numpy.random import seed as np_seed from random import seed as python_seed from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] def random_seed(i): """ Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds. Parameters ---------- i: int integer used as seed for random number generators """ # python's random module python_seed(i) # numpy random module np_seed(i)
<commit_before>"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ import numpy as np from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] <commit_msg>Make global utulity for setting random seed<commit_after>
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ from numpy.random import seed as np_seed from random import seed as python_seed from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] def random_seed(i): """ Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds. Parameters ---------- i: int integer used as seed for random number generators """ # python's random module python_seed(i) # numpy random module np_seed(i)
"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ import numpy as np from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] Make global utulity for setting random seed"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ from numpy.random import seed as np_seed from random import seed as python_seed from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] def random_seed(i): """ Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds. Parameters ---------- i: int integer used as seed for random number generators """ # python's random module python_seed(i) # numpy random module np_seed(i)
<commit_before>"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ import numpy as np from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] <commit_msg>Make global utulity for setting random seed<commit_after>"""Open Drug Discovery Toolkit ============================== Universal and easy to use resource for various drug discovery tasks, ie docking, virutal screening, rescoring. Attributes ---------- toolkit : module, Toolkits backend module, currenlty OpenBabel [ob] and RDKit [rdk]. This setting is toolkit-wide, and sets given toolkit as default """ from numpy.random import seed as np_seed from random import seed as python_seed from .toolkits import ob, rdk toolkit = ob __all__ = ['toolkit'] def random_seed(i): """ Set global random seed for all underlying components. Use 'brute-force' approach, by setting undelying libraries' seeds. Parameters ---------- i: int integer used as seed for random number generators """ # python's random module python_seed(i) # numpy random module np_seed(i)
b7a65957d1d365da727bbf42761edd93fd5941a1
03-Blink-LED-When-New-Email-Arrives/Gmail.py
03-Blink-LED-When-New-Email-Arrives/Gmail.py
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 22 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 35 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
Change pin LED is connected to.
Change pin LED is connected to.
Python
mit
grantwinney/52-Weeks-of-Pi
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 22 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor() Change pin LED is connected to.
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 35 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
<commit_before>from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 22 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor() <commit_msg>Change pin LED is connected to.<commit_after>
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 35 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 22 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor() Change pin LED is connected to.from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 35 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
<commit_before>from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 22 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor() <commit_msg>Change pin LED is connected to.<commit_after>from apiclient import errors import threading import time import RPi.GPIO as GPIO import GmailAuthorization PIN = 35 CHECK_INTERVAL = 30 service = None unread_count = 0 def refresh(): global unread_count try: messages = service.users().messages().list(userId='me', q='is:inbox + is:unread').execute() unread_count = messages['resultSizeEstimate'] except errors.HttpError as error: print('An error occurred: {0}'.format(error)) def indicator(): while True: if unread_count > 0: GPIO.output(PIN, not GPIO.input(PIN)) else: GPIO.output(PIN, GPIO.LOW) time.sleep(0.5) def monitor(): while True: refresh() time.sleep(CHECK_INTERVAL) def start_indicator(): t = threading.Thread(target=indicator) t.daemon = True t.start() def start_monitor(): t = threading.Thread(target=monitor) t.daemon = True t.start() def load_service(): global service service = GmailAuthorization.get_service() def start(): load_service() start_indicator() start_monitor()
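Whether `PIN = 35` is a physical header position or a GPIO number depends on a `GPIO.setmode()` call made elsewhere in the project; 35 only exists as a physical pin on the 40-pin header (GPIO19 in BCM terms). A sketch of the setup this module presumably relies on; the mode choice is an assumption:

import RPi.GPIO as GPIO

PIN = 35  # physical pin 35 on the 40-pin header == GPIO19 under BCM numbering

GPIO.setmode(GPIO.BOARD)                     # interpret PIN as a board position
GPIO.setup(PIN, GPIO.OUT, initial=GPIO.LOW)  # required before GPIO.output()/input()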
38bd32fdfa345799e510ee75021293c124a4d21c
api/base/settings/__init__.py
api/base/settings/__init__.py
# -*- coding: utf-8 -*- '''Consolidates settings from defaults.py and local.py. :: >>> from api.base import settings >>> settings.API_BASE 'v2/' ''' import os import warnings import itertools from .defaults import * # noqa try: from .local import * # noqa except ImportError as error: warnings.warn('No api/base/settings/local.py settings file found. Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning) if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings': from . import local from . import defaults for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'): assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting) def load_origins_whitelist(): global ORIGINS_WHITELIST from osf.models import Institution, PreprintProvider institution_origins = tuple(domain.lower() for domain in itertools.chain(*[ institution.domains for institution in Institution.find() ])) preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain='')) ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
# -*- coding: utf-8 -*- '''Consolidates settings from defaults.py and local.py. :: >>> from api.base import settings >>> settings.API_BASE 'v2/' ''' import os from urlparse import urlparse import warnings import itertools from .defaults import * # noqa try: from .local import * # noqa except ImportError as error: warnings.warn('No api/base/settings/local.py settings file found. Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning) if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings': from . import local from . import defaults for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'): assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting) def load_origins_whitelist(): global ORIGINS_WHITELIST from osf.models import Institution, PreprintProvider institution_origins = tuple(domain.lower() for domain in itertools.chain(*[ institution.domains for institution in Institution.find() ])) preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain='')) ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.
Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.
Python
apache-2.0
caneruguz/osf.io,chennan47/osf.io,aaxelb/osf.io,crcresearch/osf.io,mattclark/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,felliott/osf.io,adlius/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,adlius/osf.io,mattclark/osf.io,brianjgeiger/osf.io,erinspace/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,crcresearch/osf.io,sloria/osf.io,laurenrevere/osf.io,leb2dg/osf.io,aaxelb/osf.io,pattisdr/osf.io,binoculars/osf.io,caneruguz/osf.io,leb2dg/osf.io,baylee-d/osf.io,cslzchen/osf.io,saradbowman/osf.io,adlius/osf.io,binoculars/osf.io,chrisseto/osf.io,chrisseto/osf.io,leb2dg/osf.io,TomBaxter/osf.io,pattisdr/osf.io,icereval/osf.io,chennan47/osf.io,icereval/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,sloria/osf.io,adlius/osf.io,caneruguz/osf.io,TomBaxter/osf.io,pattisdr/osf.io,laurenrevere/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,erinspace/osf.io,icereval/osf.io,laurenrevere/osf.io,felliott/osf.io,mfraezz/osf.io,felliott/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,sloria/osf.io,caneruguz/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,crcresearch/osf.io,chrisseto/osf.io,mattclark/osf.io,leb2dg/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,baylee-d/osf.io,saradbowman/osf.io,baylee-d/osf.io,binoculars/osf.io,brianjgeiger/osf.io,aaxelb/osf.io
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.
# -*- coding: utf-8 -*- '''Consolidates settings from defaults.py and local.py. :: >>> from api.base import settings >>> settings.API_BASE 'v2/' ''' import os from urlparse import urlparse import warnings import itertools from .defaults import * # noqa try: from .local import * # noqa except ImportError as error: warnings.warn('No api/base/settings/local.py settings file found. Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning) if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings': from . import local from . import defaults for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'): assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting) def load_origins_whitelist(): global ORIGINS_WHITELIST from osf.models import Institution, PreprintProvider institution_origins = tuple(domain.lower() for domain in itertools.chain(*[ institution.domains for institution in Institution.find() ])) preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain='')) ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
<commit_before># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
<commit_msg>Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.<commit_after>
# -*- coding: utf-8 -*- '''Consolidates settings from defaults.py and local.py. :: >>> from api.base import settings >>> settings.API_BASE 'v2/' ''' import os from urlparse import urlparse import warnings import itertools from .defaults import * # noqa try: from .local import * # noqa except ImportError as error: warnings.warn('No api/base/settings/local.py settings file found. Did you remember to ' 'copy local-dist.py to local.py?', ImportWarning) if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings': from . import local from . import defaults for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'): assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting) def load_origins_whitelist(): global ORIGINS_WHITELIST from osf.models import Institution, PreprintProvider institution_origins = tuple(domain.lower() for domain in itertools.chain(*[ institution.domains for institution in Institution.find() ])) preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain='')) ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
from urlparse import urlparse
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
<commit_before># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = institution_origins + preprintprovider_origins
<commit_msg>Fix whitelist construction -Note: institutions are schemeless, causing furl and urlparse to parse the domain as `path`. PrePriPro domains are validated, so they necessarily have a scheme, causing the domain to end up in `netloc`. -Do some ugly magic to get only the domain.<commit_after># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.

::
    >>> from api.base import settings
    >>> settings.API_BASE
    'v2/'
'''

import os
from urlparse import urlparse
import warnings
import itertools

from .defaults import *  # noqa

try:
    from .local import *  # noqa
except ImportError as error:
    warnings.warn('No api/base/settings/local.py settings file found. Did you remember to '
                  'copy local-dist.py to local.py?', ImportWarning)

if not DEV_MODE and os.environ.get('DJANGO_SETTINGS_MODULE') == 'api.base.settings':
    from . import local
    from . import defaults
    for setting in ('JWE_SECRET', 'JWT_SECRET', 'BYPASS_THROTTLE_TOKEN'):
        assert getattr(local, setting, None) and getattr(local, setting, None) != getattr(defaults, setting, None), '{} must be specified in local.py when DEV_MODE is False'.format(setting)

def load_origins_whitelist():
    global ORIGINS_WHITELIST
    from osf.models import Institution, PreprintProvider

    institution_origins = tuple(domain.lower() for domain in itertools.chain(*[
        institution.domains for institution in Institution.find()
    ]))

    preprintprovider_origins = tuple(preprintprovider.domain.lower() for preprintprovider in PreprintProvider.objects.exclude(domain=''))

    ORIGINS_WHITELIST = (urlparse(url).geturl().lower().split('{}://'.format(urlparse(url).scheme))[-1] for url in institution_origins + preprintprovider_origins)
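The commit message's point about schemeless domains is easy to reproduce with the same Python 2 `urlparse` the module imports; the hostnames below are made up:

from urlparse import urlparse

# Schemeless institution domain: everything lands in `path`
urlparse('osf.example.edu')
# -> ParseResult(scheme='', netloc='', path='osf.example.edu', ...)

# Validated preprint-provider URL: the domain lands in `netloc`
urlparse('https://preprints.example.org')
# -> ParseResult(scheme='https', netloc='preprints.example.org', path='', ...)

The split on '{}://' in the new code strips whichever scheme (possibly empty) urlparse reports, leaving only the domain in both cases.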
1944a78c3c224d78551c77bb7573efeba3d90351
config/uwsgi/websiterunner.py
config/uwsgi/websiterunner.py
import os import dotenv import newrelic.agent from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) # Set up NewRelic Agent if os.environ['ENV'] in ['production', 'staging']: newrelic_ini = os.path.join(git_root.path, 'config', 'newrelic.ini') newrelic.agent.initialize(newrelic_ini, os.environ['ENV']) from app.serve import * # noqa
import os import dotenv from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) from app.serve import * # noqa
Remove newrelic package from python app
Remove newrelic package from python app
Python
mit
albertyw/base-flask,albertyw/base-flask,albertyw/base-flask,albertyw/base-flask
import os import dotenv import newrelic.agent from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) # Set up NewRelic Agent if os.environ['ENV'] in ['production', 'staging']: newrelic_ini = os.path.join(git_root.path, 'config', 'newrelic.ini') newrelic.agent.initialize(newrelic_ini, os.environ['ENV']) from app.serve import * # noqa Remove newrelic package from python app
import os import dotenv from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) from app.serve import * # noqa
<commit_before>import os import dotenv import newrelic.agent from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) # Set up NewRelic Agent if os.environ['ENV'] in ['production', 'staging']: newrelic_ini = os.path.join(git_root.path, 'config', 'newrelic.ini') newrelic.agent.initialize(newrelic_ini, os.environ['ENV']) from app.serve import * # noqa <commit_msg>Remove newrelic package from python app<commit_after>
import os import dotenv from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) from app.serve import * # noqa
import os import dotenv import newrelic.agent from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) # Set up NewRelic Agent if os.environ['ENV'] in ['production', 'staging']: newrelic_ini = os.path.join(git_root.path, 'config', 'newrelic.ini') newrelic.agent.initialize(newrelic_ini, os.environ['ENV']) from app.serve import * # noqa Remove newrelic package from python appimport os import dotenv from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) from app.serve import * # noqa
<commit_before>import os import dotenv import newrelic.agent from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) # Set up NewRelic Agent if os.environ['ENV'] in ['production', 'staging']: newrelic_ini = os.path.join(git_root.path, 'config', 'newrelic.ini') newrelic.agent.initialize(newrelic_ini, os.environ['ENV']) from app.serve import * # noqa <commit_msg>Remove newrelic package from python app<commit_after>import os import dotenv from syspath import git_root dotenv.load_dotenv(os.path.join(git_root.path, '.env')) from app.serve import * # noqa
b220f81dc6cfac4d75d923aa8d207d6ae756a134
readthedocs/builds/forms.py
readthedocs/builds/forms.py
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj)
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) return obj
Fix issue where VersionForm wasn't returning object on save
Fix issue where VersionForm wasn't returning object on save
Python
mit
pombredanne/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,pombredanne/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org,istresearch/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,davidfischer/readthedocs.org,stevepiercy/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,stevepiercy/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,espdev/readthedocs.org
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) Fix issue where VersionForm wasn't returning object on save
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) return obj
<commit_before>from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) <commit_msg>Fix issue where VersionForm wasn't returning object on save<commit_after>
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) return obj
from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) Fix issue where VersionForm wasn't returning object on savefrom django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) return obj
<commit_before>from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) <commit_msg>Fix issue where VersionForm wasn't returning object on save<commit_after>from django import forms from readthedocs.builds.models import VersionAlias, Version from readthedocs.projects.models import Project from readthedocs.core.utils import trigger_build class AliasForm(forms.ModelForm): class Meta: model = VersionAlias fields = ( 'project', 'from_slug', 'to_slug', 'largest', ) def __init__(self, instance=None, *args, **kwargs): super(AliasForm, self).__init__(instance=instance, *args, **kwargs) if instance: self.fields['project'].queryset = (Project.objects .filter(pk=instance.project.pk)) class VersionForm(forms.ModelForm): class Meta: model = Version fields = ['active', 'privacy_level', 'tags'] def save(self, *args, **kwargs): obj = super(VersionForm, self).save(*args, **kwargs) if obj.active and not obj.built and not obj.uploaded: trigger_build(project=obj.project, version=obj) return obj
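The one-line fix matters because Django's `ModelForm.save()` contract is to return the saved instance; a `save()` that falls off the end returns None and breaks any caller that keeps the result. A minimal illustration (the request data and `version` instance are assumed):

form = VersionForm(request.POST, instance=version)
if form.is_valid():
    version = form.save()  # None before this commit ...
    slug = version.slug    # ... so this line raised AttributeError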
623845a2651429b55b7a606333290aa090178334
learntools/computer_vision/ex1.py
learntools/computer_vision/ex1.py
from learntools.core import * class Q1(EqualityCheckProblem): _var = "pretrained_base.trainable" _expected = False _hint = "" _solution = CS("pretrained_base.trainable = False") class Q2(CodingProblem): _hint = "" _solution = CS( """ model = Sequential([ pretrained_base, layers.Flatten(), layers.Dense(8, activation='relu'), layers.Dense(1, activation='sigmoid'), ]) """) def check(self): # type check `Dense`, attributes, etc. pass class Q3(EqualityCheckProblem): _var = "loss" _expected = 'binary_crossentropy' _hint = "" _solution = CS("loss = 'binary_cross_entropy'") class Q4(ThoughtExperiment): _solution = ""
from learntools.core import *


class Q1(EqualityCheckProblem):
    _var = 'pretrained_base.trainable'
    _expected = False
    _hint = """
When doing transfer learning, it's generally not a good idea to retrain the entire base -- at least not without some care. The reason is that the random weights in the head will initially create large gradient updates, which propagate back into the base layers and destroy much of the pretraining. Using techniques known as **fine tuning** it's possible to further train the base on new data, but this requires some care to do well.
"""
    _solution = CS('pretrained_base.trainable = False')


class Q2(CodingProblem):
    _var = 'model'
    _hint = "You need to add two new `Dense` layers. Everything in `Sequential` should end up the same as in the tutorial."
    _solution = CS("""
import tensorflow.keras as keras
import tensorflow.keras.layers as layers

model = Sequential([
    pretrained_base,
    layers.Flatten(),
    layers.Dense(8, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
""")

    def check(self, model):
        layer_classes = [layer.__class__.__name__ for layer in model.layers]
        assert len(model.layers) == 4, \
            "You should have four lines inside of `Sequential`. You had {}.".format(len(model.layers))
        assert layer_classes[2] == 'Dense' and layer_classes[3] == 'Dense', \
            "The two layers you add should both be `Dense` layers. You added a {} layer and a {} layer.".format(layer_classes[2], layer_classes[3])
        # assert ( , ) # TODO: parameter check


# TODO: change to coding problem
class Q3(EqualityCheckProblem):
    _vars = ['loss', 'accuracy']
    _expected = ['binary_crossentropy', 'binary_accuracy']
    _hint = "These are the same as in the tutorial."
    _solution = CS("""
loss = 'binary_crossentropy'
accuracy = 'binary_accuracy'
""")


class Q4(ThoughtExperiment):
    _solution = """
That the training loss and validation loss stay fairly close is evidence that the model isn't just memorizing the training data, but rather learning general properties of the two classes. But, because this model converges at a loss greater than the VGG16 model, it's likely that it is underfitting some, and could benefit from some extra capacity.
"""
Add checking code for exercise 1
Add checking code for exercise 1
Python
apache-2.0
Kaggle/learntools,Kaggle/learntools
from learntools.core import * class Q1(EqualityCheckProblem): _var = "pretrained_base.trainable" _expected = False _hint = "" _solution = CS("pretrained_base.trainable = False") class Q2(CodingProblem): _hint = "" _solution = CS( """ model = Sequential([ pretrained_base, layers.Flatten(), layers.Dense(8, activation='relu'), layers.Dense(1, activation='sigmoid'), ]) """) def check(self): # type check `Dense`, attributes, etc. pass class Q3(EqualityCheckProblem): _var = "loss" _expected = 'binary_crossentropy' _hint = "" _solution = CS("loss = 'binary_cross_entropy'") class Q4(ThoughtExperiment): _solution = "" Add checking code for exercise 1
from learntools.core import *


class Q1(EqualityCheckProblem):
    _var = 'pretrained_base.trainable'
    _expected = False
    _hint = """
When doing transfer learning, it's generally not a good idea to retrain the entire base -- at least not without some care. The reason is that the random weights in the head will initially create large gradient updates, which propagate back into the base layers and destroy much of the pretraining. Using techniques known as **fine tuning** it's possible to further train the base on new data, but this requires some care to do well.
"""
    _solution = CS('pretrained_base.trainable = False')


class Q2(CodingProblem):
    _var = 'model'
    _hint = "You need to add two new `Dense` layers. Everything in `Sequential` should end up the same as in the tutorial."
    _solution = CS("""
import tensorflow.keras as keras
import tensorflow.keras.layers as layers

model = Sequential([
    pretrained_base,
    layers.Flatten(),
    layers.Dense(8, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
""")

    def check(self, model):
        layer_classes = [layer.__class__.__name__ for layer in model.layers]
        assert len(model.layers) == 4, \
            "You should have four lines inside of `Sequential`. You had {}.".format(len(model.layers))
        assert layer_classes[2] == 'Dense' and layer_classes[3] == 'Dense', \
            "The two layers you add should both be `Dense` layers. You added a {} layer and a {} layer.".format(layer_classes[2], layer_classes[3])
        # assert ( , ) # TODO: parameter check


# TODO: change to coding problem
class Q3(EqualityCheckProblem):
    _vars = ['loss', 'accuracy']
    _expected = ['binary_crossentropy', 'binary_accuracy']
    _hint = "These are the same as in the tutorial."
    _solution = CS("""
loss = 'binary_crossentropy'
accuracy = 'binary_accuracy'
""")


class Q4(ThoughtExperiment):
    _solution = """
That the training loss and validation loss stay fairly close is evidence that the model isn't just memorizing the training data, but rather learning general properties of the two classes. But, because this model converges at a loss greater than the VGG16 model, it's likely that it is underfitting some, and could benefit from some extra capacity.
"""
<commit_before>from learntools.core import * class Q1(EqualityCheckProblem): _var = "pretrained_base.trainable" _expected = False _hint = "" _solution = CS("pretrained_base.trainable = False") class Q2(CodingProblem): _hint = "" _solution = CS( """ model = Sequential([ pretrained_base, layers.Flatten(), layers.Dense(8, activation='relu'), layers.Dense(1, activation='sigmoid'), ]) """) def check(self): # type check `Dense`, attributes, etc. pass class Q3(EqualityCheckProblem): _var = "loss" _expected = 'binary_crossentropy' _hint = "" _solution = CS("loss = 'binary_cross_entropy'") class Q4(ThoughtExperiment): _solution = "" <commit_msg>Add checking code for exercise 1<commit_after>
from learntools.core import *


class Q1(EqualityCheckProblem):
    _var = 'pretrained_base.trainable'
    _expected = False
    _hint = """
When doing transfer learning, it's generally not a good idea to retrain the entire base -- at least not without some care. The reason is that the random weights in the head will initially create large gradient updates, which propagate back into the base layers and destroy much of the pretraining. Using techniques known as **fine tuning** it's possible to further train the base on new data, but this requires some care to do well.
"""
    _solution = CS('pretrained_base.trainable = False')


class Q2(CodingProblem):
    _var = 'model'
    _hint = "You need to add two new `Dense` layers. Everything in `Sequential` should end up the same as in the tutorial."
    _solution = CS("""
import tensorflow.keras as keras
import tensorflow.keras.layers as layers

model = Sequential([
    pretrained_base,
    layers.Flatten(),
    layers.Dense(8, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
""")

    def check(self, model):
        layer_classes = [layer.__class__.__name__ for layer in model.layers]
        assert len(model.layers) == 4, \
            "You should have four lines inside of `Sequential`. You had {}.".format(len(model.layers))
        assert layer_classes[2] == 'Dense' and layer_classes[3] == 'Dense', \
            "The two layers you add should both be `Dense` layers. You added a {} layer and a {} layer.".format(layer_classes[2], layer_classes[3])
        # assert ( , ) # TODO: parameter check


# TODO: change to coding problem
class Q3(EqualityCheckProblem):
    _vars = ['loss', 'accuracy']
    _expected = ['binary_crossentropy', 'binary_accuracy']
    _hint = "These are the same as in the tutorial."
    _solution = CS("""
loss = 'binary_crossentropy'
accuracy = 'binary_accuracy'
""")


class Q4(ThoughtExperiment):
    _solution = """
That the training loss and validation loss stay fairly close is evidence that the model isn't just memorizing the training data, but rather learning general properties of the two classes. But, because this model converges at a loss greater than the VGG16 model, it's likely that it is underfitting some, and could benefit from some extra capacity.
"""
from learntools.core import * class Q1(EqualityCheckProblem): _var = "pretrained_base.trainable" _expected = False _hint = "" _solution = CS("pretrained_base.trainable = False") class Q2(CodingProblem): _hint = "" _solution = CS( """ model = Sequential([ pretrained_base, layers.Flatten(), layers.Dense(8, activation='relu'), layers.Dense(1, activation='sigmoid'), ]) """) def check(self): # type check `Dense`, attributes, etc. pass class Q3(EqualityCheckProblem): _var = "loss" _expected = 'binary_crossentropy' _hint = "" _solution = CS("loss = 'binary_cross_entropy'") class Q4(ThoughtExperiment): _solution = "" Add checking code for exercise 1from learntools.core import * class Q1(EqualityCheckProblem): _var = 'pretrained_base.trainable' _expected = False _hint = """ When doing transfer learning, it's generally not a good idea to retrain the entire base -- at least not without some care. The reason is that the random weights in the head will initially create large gradient updates, which propogate back into the base layers and destroy much of the pretraining. Using techniques known as **fine tuning** it's possible to further train the base on new data, but this requires some care to do well. """ _solution = CS('pretrained_base.trainable = False') class Q2(CodingProblem): _var = 'model' _hint = "You need to add two new `Dense` layers. Everything in `Sequential` should end up the same as in the tutorial." _solution = CS(""" import tensorflow.keras as keras import tensorflow.keras.layers as layers model = Sequential([ pretrained_base, layers.Flatten(), layers.Dense(8, activation='relu'), layers.Dense(1, activation='sigmoid'), ]) """) def check(self, model): layer_classes = [layer.__class__.__name__ for layer in model.layers] assert (len(model.layers) == 4, "You should have four lines inside of `Sequential`. You had {}.".format(len(model.layers))) assert (layer_classes[2] == 'Dense' and layer_classes[3] == 'Dense', "The two layers you add should both be `Dense` layers. You added a {} layer and a {} layer.".format(layer_classes[2], layer_classes[3])) # assert ( , ) # TODO: parameter check # TODO: change to coding problem class Q3(EqualityCheckProblem): _vars = ['loss', 'accuracy'] _expected = ['binary_crossentropy', 'binary_accuracy'] _hint = "These are the same as in the tutorial." _solution = CS(""" loss = 'binary_crossentropy' accuracy = 'binary_accuracy' """) class Q4(ThoughtExperiment): _solution = """ That the training loss and validation loss stay fairly close is evidence that the model isn't just memorizing the training data, but rather learning general properties of the two classes. But, because this model converges at a loss greater than the VGG16 model, it's likely that it is underfitting some, and could benefit from some extra capacity. """
<commit_before>from learntools.core import *

class Q1(EqualityCheckProblem):
    _var = "pretrained_base.trainable"
    _expected = False
    _hint = ""
    _solution = CS("pretrained_base.trainable = False")


class Q2(CodingProblem):
    _hint = ""
    _solution = CS(
"""
model = Sequential([
    pretrained_base,
    layers.Flatten(),
    layers.Dense(8, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
""")
    def check(self):
        # type check `Dense`, attributes, etc.
        pass


class Q3(EqualityCheckProblem):
    _var = "loss"
    _expected = 'binary_crossentropy'
    _hint = ""
    _solution = CS("loss = 'binary_cross_entropy'")


class Q4(ThoughtExperiment):
    _solution = ""
<commit_msg>Add checking code for exercise 1<commit_after>from learntools.core import *

class Q1(EqualityCheckProblem):
    _var = 'pretrained_base.trainable'
    _expected = False
    _hint = """
When doing transfer learning, it's generally not a good idea to retrain
the entire base -- at least not without some care. The reason is that the
random weights in the head will initially create large gradient updates,
which propagate back into the base layers and destroy much of the
pretraining. Using techniques known as **fine tuning** it's possible to
further train the base on new data, but this requires some care to do
well.
"""
    _solution = CS('pretrained_base.trainable = False')


class Q2(CodingProblem):
    _var = 'model'
    _hint = "You need to add two new `Dense` layers. Everything in `Sequential` should end up the same as in the tutorial."
    _solution = CS("""
import tensorflow.keras as keras
import tensorflow.keras.layers as layers

model = Sequential([
    pretrained_base,
    layers.Flatten(),
    layers.Dense(8, activation='relu'),
    layers.Dense(1, activation='sigmoid'),
])
""")
    def check(self, model):
        layer_classes = [layer.__class__.__name__ for layer in model.layers]
        assert len(model.layers) == 4, \
            "You should have four lines inside of `Sequential`. You had {}.".format(len(model.layers))
        assert layer_classes[2] == 'Dense' and layer_classes[3] == 'Dense', \
            "The two layers you add should both be `Dense` layers. You added a {} layer and a {} layer.".format(layer_classes[2], layer_classes[3])
        # assert ( , ) # TODO: parameter check


# TODO: change to coding problem
class Q3(EqualityCheckProblem):
    _vars = ['loss', 'accuracy']
    _expected = ['binary_crossentropy', 'binary_accuracy']
    _hint = "These are the same as in the tutorial."
    _solution = CS("""
loss = 'binary_crossentropy'
accuracy = 'binary_accuracy'
""")


class Q4(ThoughtExperiment):
    _solution = """
That the training loss and validation loss stay fairly close is evidence
that the model isn't just memorizing the training data, but rather learning
general properties of the two classes. But, because this model converges at
a loss greater than the VGG16 model, it's likely that it is underfitting
some, and could benefit from some extra capacity.
"""
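The `# TODO: parameter check` left in `Q2.check` above could plausibly be completed along the lines of this sketch. It is hypothetical, not part of the commit; `units` is a standard attribute of `keras.layers.Dense`, and the expected sizes (8 and 1) come from the solution code in the same record.

def check_dense_params(model):
    # Hypothetical continuation of Q2.check: verify the sizes of the
    # two Dense layers named in the solution.
    dense = [layer for layer in model.layers
             if layer.__class__.__name__ == 'Dense']
    assert dense[0].units == 8, \
        "The first Dense layer should have 8 units. It had {}.".format(dense[0].units)
    assert dense[1].units == 1, \
        "The final Dense layer should have 1 unit. It had {}.".format(dense[1].units)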
7caf955feff0a08dd5d3f46abd63cf5223fb428c
xirvik/logging.py
xirvik/logging.py
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: log.setLevel(level) syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
Set main log level when using only syslog
Set main log level when using only syslog
Python
mit
Tatsh/xirvik-tools
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log Set main log level when using only syslog
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: log.setLevel(level) syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
<commit_before>from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log <commit_msg>Set main log level when using only syslog<commit_after>
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: log.setLevel(level) syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log Set main log level when using only syslogfrom logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: log.setLevel(level) syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
<commit_before>from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log <commit_msg>Set main log level when using only syslog<commit_after>from logging.handlers import SysLogHandler import logging import sys syslogh = None def cleanup(): global syslogh if syslogh: syslogh.close() logging.shutdown() def get_logger(name, level=logging.INFO, verbose=False, debug=False, syslog=False): global syslogh log = logging.getLogger(name) if verbose or debug: log.setLevel(level if not debug else logging.DEBUG) channel = logging.StreamHandler(sys.stdout if debug else sys.stderr) channel.setFormatter(logging.Formatter('%(asctime)s - ' '%(levelname)s - %(message)s')) channel.setLevel(level if not debug else logging.DEBUG) log.addHandler(channel) if syslog: log.setLevel(level) syslogh = SysLogHandler(address='/dev/log') syslogh.setFormatter(logging.Formatter('%(message)s')) syslogh.setLevel(logging.INFO) log.addHandler(syslogh) return log
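The behavior behind this commit: a handler's own level is consulted only after a record passes the logger's level check, and a logger with no explicit level falls back to the root default of WARNING, so the syslog-only path silently dropped INFO messages until `log.setLevel(level)` was added. A minimal runnable sketch of that gate (a `StreamHandler` stands in for `SysLogHandler` so it needs no syslog socket):

import logging

log = logging.getLogger('syslog_only_demo')
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
log.addHandler(handler)

log.info('dropped: the effective logger level is still WARNING')
log.setLevel(logging.INFO)  # the call the commit adds, in effect
log.info('emitted: the record now passes the logger-level gate too')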
88610ede4f73c4c4c5543218904f4867fe530e26
dimod/package_info.py
dimod/package_info.py
__version__ = '1.0.0.dev2' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
__version__ = '1.0.0.dev3' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
Update version 1.0.0.dev2 -> 1.0.0.dev3
Update version 1.0.0.dev2 -> 1.0.0.dev3
Python
apache-2.0
dwavesystems/dimod,dwavesystems/dimod
__version__ = '1.0.0.dev2'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'

Update version 1.0.0.dev2 -> 1.0.0.dev3
__version__ = '1.0.0.dev3' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
<commit_before>__version__ = '1.0.0.dev2'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 1.0.0.dev2 -> 1.0.0.dev3<commit_after>
__version__ = '1.0.0.dev3' __author__ = 'D-Wave Systems Inc.' __authoremail__ = 'acondello@dwavesys.com' __description__ = 'A shared API for binary quadratic model samplers.'
__version__ = '1.0.0.dev2'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'

Update version 1.0.0.dev2 -> 1.0.0.dev3__version__ = '1.0.0.dev3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_before>__version__ = '1.0.0.dev2'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
<commit_msg>Update version 1.0.0.dev2 -> 1.0.0.dev3<commit_after>__version__ = '1.0.0.dev3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
1b4e7ebd4aaa7f506789a112a9338667e955954f
django_git/views.py
django_git/views.py
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): print repo, commit return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
Add newline to end of file
Add newline to end of file Signed-off-by: Seth Buntin <7fa3258757ee476d85f026594ec3f1563305da2c@gmail.com>
Python
bsd-3-clause
sethtrain/django-git,sethtrain/django-git
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))Add newline to end of file Signed-off-by: Seth Buntin <7fa3258757ee476d85f026594ec3f1563305da2c@gmail.com>
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): print repo, commit return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
<commit_before>from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))<commit_msg>Add newline to end of file Signed-off-by: Seth Buntin <7fa3258757ee476d85f026594ec3f1563305da2c@gmail.com><commit_after>
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): print repo, commit return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))Add newline to end of file Signed-off-by: Seth Buntin <7fa3258757ee476d85f026594ec3f1563305da2c@gmail.com>from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): print repo, commit return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
<commit_before>from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))<commit_msg>Add newline to end of file Signed-off-by: Seth Buntin <7fa3258757ee476d85f026594ec3f1563305da2c@gmail.com><commit_after>from pygments import highlight from pygments.lexers import guess_lexer_for_filename from pygments.formatters import HtmlFormatter from django.http import HttpResponse from django.shortcuts import render_to_response, get_object_or_404, get_list_or_404 from django.template import RequestContext from django_git.utils import * def index(request, template_name='django_git/index.html'): return render_to_response(template_name, {'repos': get_repos()}, context_instance=RequestContext(request)) def repo(request, repo, template_name='django_git/repo.html'): return render_to_response(template_name, {'repo': get_repo(repo)}, context_instance=RequestContext(request)) def commit(request, repo, commit, template_name='django_git/commit.html'): print repo, commit return render_to_response(template_name, {'diffs': get_commit(repo, commit).diffs, 'repo': get_repo(repo), 'commit': commit }, context_instance=RequestContext(request)) def blob(request, repo, commit): file = request.GET.get('file', '') blob = get_blob(repo, commit, file) lexer = guess_lexer_for_filename(blob.basename, blob.data) return HttpResponse(highlight(blob.data, lexer, HtmlFormatter(cssclass="pygment_highlight", linenos='inline')))
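For reference, the pygments calls the `blob` view above relies on can be exercised standalone; the filename and source below are invented for illustration:

from pygments import highlight
from pygments.lexers import guess_lexer_for_filename
from pygments.formatters import HtmlFormatter

source = "def add(a, b):\n    return a + b\n"
lexer = guess_lexer_for_filename('example.py', source)  # selects a Python lexer
html = highlight(source, lexer,
                 HtmlFormatter(cssclass='pygment_highlight', linenos='inline'))
print(html.splitlines()[0])  # first line of the generated HTML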
72fd4e20a537ff5ff0f454ba552ecb6e4d09b76d
test/test_gn.py
test/test_gn.py
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) if __name__ == '__main__': unittest.main()
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' TEST_DIR_TWO_SUB = '/tmp/testtwosub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) def test_multiple_subdirectories(self): ''' If there are multiple subdirectories in the base directory, find the one created most recently and return it ''' self.assertEquals('temp2', gn.transfer(TEST_DIR_TWO_SUB)) if __name__ == '__main__': unittest.main()
Add test for multiple subdirectories
Add test for multiple subdirectories
Python
bsd-2-clause
ambidextrousTx/GotoNewest
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) if __name__ == '__main__': unittest.main() Add test for multiple subdirectories
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' TEST_DIR_TWO_SUB = '/tmp/testtwosub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) def test_multiple_subdirectories(self): ''' If there are multiple subdirectories in the base directory, find the one created most recently and return it ''' self.assertEquals('temp2', gn.transfer(TEST_DIR_TWO_SUB)) if __name__ == '__main__': unittest.main()
<commit_before>''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) if __name__ == '__main__': unittest.main() <commit_msg>Add test for multiple subdirectories<commit_after>
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' TEST_DIR_TWO_SUB = '/tmp/testtwosub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) def test_multiple_subdirectories(self): ''' If there are multiple subdirectories in the base directory, find the one created most recently and return it ''' self.assertEquals('temp2', gn.transfer(TEST_DIR_TWO_SUB)) if __name__ == '__main__': unittest.main()
''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) if __name__ == '__main__': unittest.main() Add test for multiple subdirectories''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' TEST_DIR_TWO_SUB = '/tmp/testtwosub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) def test_multiple_subdirectories(self): ''' If there are multiple subdirectories in the base directory, find the one created most recently and return it ''' self.assertEquals('temp2', gn.transfer(TEST_DIR_TWO_SUB)) if __name__ == '__main__': unittest.main()
<commit_before>''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) if __name__ == '__main__': unittest.main() <commit_msg>Add test for multiple subdirectories<commit_after>''' GotoNewest tests ''' import unittest import sys sys.path.append('../src/') import gn TEST_DIR_NO_SUB = '/tmp/testnosub' TEST_DIR_ONE_SUB = '/tmp/testonesub' TEST_DIR_TWO_SUB = '/tmp/testtwosub' class TestGotoNewest(unittest.TestCase): ''' Test class for GotoNewest ''' def test_empty_base_dir(self): ''' If the base directory is empty, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, None) def test_base_dir_with_no_subdirs(self): ''' If the base directory has no subdirectories, raise an AttributeError ''' self.assertRaises(AttributeError, gn.transfer, TEST_DIR_NO_SUB) def test_one_subdirectory(self): ''' If there is only one subdirectory in the base directory, return the directory ''' self.assertEquals('temp', gn.transfer(TEST_DIR_ONE_SUB)) def test_multiple_subdirectories(self): ''' If there are multiple subdirectories in the base directory, find the one created most recently and return it ''' self.assertEquals('temp2', gn.transfer(TEST_DIR_TWO_SUB)) if __name__ == '__main__': unittest.main()
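The new test implies `gn.transfer` must pick the newest of several subdirectories. A sketch of that selection, assuming "created most recently" is approximated by modification time (`st_mtime`), since true creation time is not portable; the real module may differ:

import os

def newest_subdir(base):
    # Mirrors what the tests above expect: AttributeError when there is
    # nothing to choose from, otherwise the name of the most recently
    # modified subdirectory.
    entries = [e for e in os.scandir(base) if e.is_dir()]
    if not entries:
        raise AttributeError('no subdirectories in %s' % base)
    return max(entries, key=lambda e: e.stat().st_mtime).name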
00aec138157bb9c2393588adcbf6f1ac9cfb63d3
testSmoother.py
testSmoother.py
import csv import numpy as np import smooth import time print 'Loading data from data.csv' with open('gait-raw.csv', 'rb') as csvfile: reader = csv.reader(csvfile, delimiter=',') x = list(reader) rawdata = np.array(x).astype('float') print 'Done' start_time = time.time() y = smooth.smooth(rawdata,0.0025,1e-3) print ("%s seconds to process" % (time.time() - start_time)) print 'Saving output to out.csv' np.savetxt("out.csv", y, delimiter=",")
import csv import numpy as np import smooth import time print 'Loading data from data.csv' with open('gait-raw.csv', 'r') as csvfile: reader = csv.reader(csvfile, delimiter=',') x = list(reader) rawdata = np.array(x).astype('float') print 'Done' start_time = time.time() y = smooth.smooth(rawdata,0.0025,1e-3) print ("%s seconds to process" % (time.time() - start_time)) print 'Saving output to out.csv' np.savetxt("out.csv", y, delimiter=",")
Change 'rb' to 'r' on file open line
Change 'rb' to 'r' on file open line

Python 3 update uses 'r' instead of Python 2 'rb'
Python
mit
mgb45/MoGapFill,mgb45/MoGapFill
import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")

Change 'rb' to 'r' on file open line

Python 3 update uses 'r' instead of Python 2 'rb'
import csv import numpy as np import smooth import time print 'Loading data from data.csv' with open('gait-raw.csv', 'r') as csvfile: reader = csv.reader(csvfile, delimiter=',') x = list(reader) rawdata = np.array(x).astype('float') print 'Done' start_time = time.time() y = smooth.smooth(rawdata,0.0025,1e-3) print ("%s seconds to process" % (time.time() - start_time)) print 'Saving output to out.csv' np.savetxt("out.csv", y, delimiter=",")
<commit_before>import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")
<commit_msg>Change 'rb' to 'r' on file open line

Python 3 update uses 'r' instead of Python 2 'rb'<commit_after>
import csv import numpy as np import smooth import time print 'Loading data from data.csv' with open('gait-raw.csv', 'r') as csvfile: reader = csv.reader(csvfile, delimiter=',') x = list(reader) rawdata = np.array(x).astype('float') print 'Done' start_time = time.time() y = smooth.smooth(rawdata,0.0025,1e-3) print ("%s seconds to process" % (time.time() - start_time)) print 'Saving output to out.csv' np.savetxt("out.csv", y, delimiter=",")
import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")

Change 'rb' to 'r' on file open line

Python 3 update uses 'r' instead of Python 2 'rb'import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")
<commit_before>import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")
<commit_msg>Change 'rb' to 'r' on file open line

Python 3 update uses 'r' instead of Python 2 'rb'<commit_after>import csv
import numpy as np
import smooth
import time

print 'Loading data from data.csv'
with open('gait-raw.csv', 'r') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    x = list(reader)

rawdata = np.array(x).astype('float')
print 'Done'

start_time = time.time()
y = smooth.smooth(rawdata,0.0025,1e-3)
print ("%s seconds to process" % (time.time() - start_time))

print 'Saving output to out.csv'
np.savetxt("out.csv", y, delimiter=",")
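Why the mode change matters: under Python 3, `csv.reader` expects a text-mode file (a binary-mode handle yields bytes and fails), and the csv docs recommend opening with `newline=''` so the reader handles line endings itself. A sketch against the same file, assuming it exists:

import csv

with open('gait-raw.csv', 'r', newline='') as csvfile:
    rows = list(csv.reader(csvfile, delimiter=','))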
4f16a2bff6c7e972b0803fd3348408f13eddbf41
bin/update/deploy_dev_base.py
bin/update/deploy_dev_base.py
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets')
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') management_cmd(ctx, 'runscript update_firefox_os_feeds')
Update firefox os feeds on dev deploy
Update firefox os feeds on dev deploy
Python
mpl-2.0
analytics-pros/mozilla-bedrock,andreadelrio/bedrock,bensternthal/bedrock,petabyte/bedrock,SujaySKumar/bedrock,TheoChevalier/bedrock,malena/bedrock,chirilo/bedrock,gerv/bedrock,yglazko/bedrock,sylvestre/bedrock,gauthierm/bedrock,mozilla/bedrock,hoosteeno/bedrock,mozilla/bedrock,malena/bedrock,sgarrity/bedrock,jgmize/bedrock,marcoscaceres/bedrock,mkmelin/bedrock,flodolo/bedrock,kyoshino/bedrock,marcoscaceres/bedrock,dudepare/bedrock,jpetto/bedrock,sgarrity/bedrock,TheJJ100100/bedrock,ericawright/bedrock,Sancus/bedrock,CSCI-462-01-2017/bedrock,alexgibson/bedrock,petabyte/bedrock,rishiloyola/bedrock,petabyte/bedrock,alexgibson/bedrock,analytics-pros/mozilla-bedrock,flodolo/bedrock,jpetto/bedrock,MichaelKohler/bedrock,pascalchevrel/bedrock,CSCI-462-01-2017/bedrock,dudepare/bedrock,mahinthjoe/bedrock,craigcook/bedrock,davehunt/bedrock,sylvestre/bedrock,dudepare/bedrock,Sancus/bedrock,malena/bedrock,ericawright/bedrock,jacshfr/mozilla-bedrock,glogiotatidis/bedrock,pascalchevrel/bedrock,CSCI-462-01-2017/bedrock,sylvestre/bedrock,TheJJ100100/bedrock,Jobava/bedrock,gerv/bedrock,mkmelin/bedrock,analytics-pros/mozilla-bedrock,kyoshino/bedrock,pmclanahan/bedrock,mahinthjoe/bedrock,l-hedgehog/bedrock,jgmize/bedrock,mermi/bedrock,hoosteeno/bedrock,jacshfr/mozilla-bedrock,malena/bedrock,davehunt/bedrock,chirilo/bedrock,yglazko/bedrock,SujaySKumar/bedrock,SujaySKumar/bedrock,kyoshino/bedrock,chirilo/bedrock,marcoscaceres/bedrock,SujaySKumar/bedrock,alexgibson/bedrock,craigcook/bedrock,andreadelrio/bedrock,TheoChevalier/bedrock,jgmize/bedrock,pmclanahan/bedrock,davehunt/bedrock,Sancus/bedrock,dudepare/bedrock,Jobava/bedrock,schalkneethling/bedrock,mozilla/bedrock,mahinthjoe/bedrock,TheJJ100100/bedrock,MichaelKohler/bedrock,bensternthal/bedrock,ericawright/bedrock,andreadelrio/bedrock,jacshfr/mozilla-bedrock,mermi/bedrock,mkmelin/bedrock,glogiotatidis/bedrock,pascalchevrel/bedrock,schalkneethling/bedrock,l-hedgehog/bedrock,rishiloyola/bedrock,gauthierm/bedrock,sylvestre/bedrock,pascalchevrel/bedrock,petabyte/bedrock,glogiotatidis/bedrock,andreadelrio/bedrock,alexgibson/bedrock,jpetto/bedrock,pmclanahan/bedrock,schalkneethling/bedrock,hoosteeno/bedrock,yglazko/bedrock,TheJJ100100/bedrock,mermi/bedrock,jacshfr/mozilla-bedrock,MichaelKohler/bedrock,mozilla/bedrock,pmclanahan/bedrock,Jobava/bedrock,jgmize/bedrock,flodolo/bedrock,schalkneethling/bedrock,craigcook/bedrock,mermi/bedrock,MichaelKohler/bedrock,TheoChevalier/bedrock,jacshfr/mozilla-bedrock,jpetto/bedrock,gauthierm/bedrock,rishiloyola/bedrock,gerv/bedrock,bensternthal/bedrock,mahinthjoe/bedrock,sgarrity/bedrock,chirilo/bedrock,l-hedgehog/bedrock,ericawright/bedrock,glogiotatidis/bedrock,TheoChevalier/bedrock,mkmelin/bedrock,l-hedgehog/bedrock,gerv/bedrock,sgarrity/bedrock,rishiloyola/bedrock,Jobava/bedrock,flodolo/bedrock,davehunt/bedrock,analytics-pros/mozilla-bedrock,yglazko/bedrock,CSCI-462-01-2017/bedrock,hoosteeno/bedrock,bensternthal/bedrock,marcoscaceres/bedrock,kyoshino/bedrock,gauthierm/bedrock,Sancus/bedrock,craigcook/bedrock
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') Update firefox os feeds on dev deploy
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') management_cmd(ctx, 'runscript update_firefox_os_feeds')
<commit_before>import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') <commit_msg>Update firefox os feeds on dev deploy<commit_after>
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') management_cmd(ctx, 'runscript update_firefox_os_feeds')
import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') Update firefox os feeds on dev deployimport logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') management_cmd(ctx, 'runscript update_firefox_os_feeds')
<commit_before>import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') <commit_msg>Update firefox os feeds on dev deploy<commit_after>import logging from commander.deploy import task from deploy_base import * # noqa log = logging.getLogger(__name__) @task def database(ctx): # only ever run this one on demo and dev. management_cmd(ctx, 'bedrock_truncate_database --yes-i-am-sure') management_cmd(ctx, 'syncdb --migrate --noinput') management_cmd(ctx, 'rnasync') management_cmd(ctx, 'update_security_advisories --force --quiet', use_src_dir=True) management_cmd(ctx, 'cron update_reps_ical') management_cmd(ctx, 'cron update_tweets') management_cmd(ctx, 'runscript update_firefox_os_feeds')
3394278f379763dae9db34f3b528a229b8f06bc6
tempora/tests/test_timing.py
tempora/tests/test_timing.py
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
Revert "Use pytest.mark to selectively skip test."
Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.
Python
mit
jaraco/tempora
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1 Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
<commit_before>import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1 <commit_msg>Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.<commit_after>
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1 Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
<commit_before>import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.mark.skipif("not hasattr(time, 'tzset')") @pytest.fixture def alt_tz(monkeypatch): @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1 <commit_msg>Revert "Use pytest.mark to selectively skip test." Markers can not be applied to fixtures (https://docs.pytest.org/en/latest/reference/reference.html#marks). Fixes #16. This reverts commit 14d532af265e35af33a28d61a68d545993fc5b78.<commit_after>import datetime import time import contextlib import os from unittest import mock import pytest from tempora import timing def test_IntervalGovernor(): """ IntervalGovernor should prevent a function from being called more than once per interval. """ func_under_test = mock.MagicMock() # to look like a function, it needs a __name__ attribute func_under_test.__name__ = 'func_under_test' interval = datetime.timedelta(seconds=1) governed = timing.IntervalGovernor(interval)(func_under_test) governed('a') governed('b') governed(3, 'sir') func_under_test.assert_called_once_with('a') @pytest.fixture def alt_tz(monkeypatch): if not hasattr(time, 'tzset'): pytest.skip("tzset not available") @contextlib.contextmanager def change(): val = 'AEST-10AEDT-11,M10.5.0,M3.5.0' with monkeypatch.context() as ctx: ctx.setitem(os.environ, 'TZ', val) time.tzset() yield time.tzset() return change() def test_Stopwatch_timezone_change(alt_tz): """ The stopwatch should provide a consistent duration even if the timezone changes. """ watch = timing.Stopwatch() with alt_tz: assert abs(watch.split().total_seconds()) < 0.1
1b189a8b96fb36de3f7438d14dd1ee6bf1c0980f
www/start.py
www/start.py
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Define server parameters application = tornado.web.Application([ (r"/", MainHandler), ]) application.listen(port) # Start the server tornado.ioloop.IOLoop.instance().start()
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") # Define tornado application application = tornado.web.Application([ (r"/", MainHandler), ]) if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Start the server application.listen(port) tornado.ioloop.IOLoop.instance().start()
Declare tornado application outside __main__ block
Declare tornado application outside __main__ block
Python
mit
dubzzz/py-run-tracking,dubzzz/py-run-tracking,dubzzz/py-run-tracking
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Define server parameters application = tornado.web.Application([ (r"/", MainHandler), ]) application.listen(port) # Start the server tornado.ioloop.IOLoop.instance().start() Declare tornado application outside __main__ block
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") # Define tornado application application = tornado.web.Application([ (r"/", MainHandler), ]) if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Start the server application.listen(port) tornado.ioloop.IOLoop.instance().start()
<commit_before>#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Define server parameters application = tornado.web.Application([ (r"/", MainHandler), ]) application.listen(port) # Start the server tornado.ioloop.IOLoop.instance().start() <commit_msg>Declare tornado application outside __main__ block<commit_after>
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") # Define tornado application application = tornado.web.Application([ (r"/", MainHandler), ]) if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Start the server application.listen(port) tornado.ioloop.IOLoop.instance().start()
#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Define server parameters application = tornado.web.Application([ (r"/", MainHandler), ]) application.listen(port) # Start the server tornado.ioloop.IOLoop.instance().start() Declare tornado application outside __main__ block#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") # Define tornado application application = tornado.web.Application([ (r"/", MainHandler), ]) if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Start the server application.listen(port) tornado.ioloop.IOLoop.instance().start()
<commit_before>#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Define server parameters application = tornado.web.Application([ (r"/", MainHandler), ]) application.listen(port) # Start the server tornado.ioloop.IOLoop.instance().start() <commit_msg>Declare tornado application outside __main__ block<commit_after>#!/usr/bin/python # Launch a very light-HTTP server: Tornado # # Requirements (import from): # - tornado # # Syntax: # ./start.py <port=8080> import tornado.ioloop import tornado.web import sys from os import path #sys.path.append(path.join(path.dirname(__file__), "../scripts/")) #from generate_db import DEFAULT_DB class MainHandler(tornado.web.RequestHandler): def get(self): self.write("Hello World ^^") # Define tornado application application = tornado.web.Application([ (r"/", MainHandler), ]) if __name__ == "__main__": if len(sys.argv) != 1 and len(sys.argv) != 2: print('''Syntax: ./start.py <port=8080>''') exit(1) try: if (len(sys.argv) == 2): port = int(sys.argv[1]) else: port = 8080 except ValueError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(2) except TypeError, e: print('''ERROR: {}'''.format(e)) print('''Syntax: ./start.py <port=8080>''') exit(3) # Start the server application.listen(port) tornado.ioloop.IOLoop.instance().start()
854fc85087c77dc1f4291f0811eecdf91da132aa
TorGTK/pref_handle.py
TorGTK/pref_handle.py
import ConfigParser from gi.repository import Gtk from pref_mapping import * from var import * def read_config_if_exists(filename): if os.path.isfile(filename): # Init config parser and read config Config = ConfigParser.SafeConfigParser() Config.read(filename) section = "TorGTKprefs" # Loop through options options = Config.options(section) for option in options: value = Config.get(section, option) objs[pref_mappings[option]].set_text(value) def write_config(filename): # Open file config_fd = open(filename, "w") Config = ConfigParser.ConfigParser() Config.add_section("TorGTKprefs") # Write sections to file and close it for key in pref_mappings: Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text()) Config.write(config_fd) config_fd.close()
import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_value(int(value))

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
Add code for saving preferences correctly, but only for integers now
Add code for saving preferences correctly, but only for integers now
Python
bsd-2-clause
neelchauhan/TorGTK,neelchauhan/TorNova
import ConfigParser from gi.repository import Gtk from pref_mapping import * from var import * def read_config_if_exists(filename): if os.path.isfile(filename): # Init config parser and read config Config = ConfigParser.SafeConfigParser() Config.read(filename) section = "TorGTKprefs" # Loop through options options = Config.options(section) for option in options: value = Config.get(section, option) objs[pref_mappings[option]].set_text(value) def write_config(filename): # Open file config_fd = open(filename, "w") Config = ConfigParser.ConfigParser() Config.add_section("TorGTKprefs") # Write sections to file and close it for key in pref_mappings: Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text()) Config.write(config_fd) config_fd.close() Add code for saving preferences correctly, but only for integers now
import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_value(int(value))

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
<commit_before>import ConfigParser from gi.repository import Gtk from pref_mapping import * from var import * def read_config_if_exists(filename): if os.path.isfile(filename): # Init config parser and read config Config = ConfigParser.SafeConfigParser() Config.read(filename) section = "TorGTKprefs" # Loop through options options = Config.options(section) for option in options: value = Config.get(section, option) objs[pref_mappings[option]].set_text(value) def write_config(filename): # Open file config_fd = open(filename, "w") Config = ConfigParser.ConfigParser() Config.add_section("TorGTKprefs") # Write sections to file and close it for key in pref_mappings: Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text()) Config.write(config_fd) config_fd.close() <commit_msg>Add code for saving preferences correctly, but only for integers now<commit_after>
import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_value(int(value))

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_text(value)

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
Add code for saving preferences correctly, but only for integers nowimport ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_value(int(value))

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
<commit_before>import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_text(value)

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
<commit_msg>Add code for saving preferences correctly, but only for integers now<commit_after>import ConfigParser
from gi.repository import Gtk
from pref_mapping import *
from var import *

def read_config_if_exists(filename):
    if os.path.isfile(filename):
        # Init config parser and read config
        Config = ConfigParser.SafeConfigParser()
        Config.read(filename)
        section = "TorGTKprefs"

        # Loop through options
        options = Config.options(section)
        for option in options:
            value = Config.get(section, option)
            objs[pref_mappings[option]].set_value(int(value))

def write_config(filename):
    # Open file
    config_fd = open(filename, "w")
    Config = ConfigParser.ConfigParser()
    Config.add_section("TorGTKprefs")

    # Write sections to file and close it
    for key in pref_mappings:
        Config.set("TorGTKprefs", key, objs[pref_mappings[key]].get_text())
    Config.write(config_fd)
    config_fd.close()
b4d76c715810ddd30c0966df2614cd6ed7b03566
tweets/views.py
tweets/views.py
from django.http import Http404 from django.shortcuts import render from django.utils.translation import ugettext as _ from django.views.generic import ListView from .models import Message class MessageList(ListView): template_name = "message_list.html" model = Message class MyMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() return queryset.filter(user=self.request.user) class FilteredMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() queryset = queryset.filter(user__username=self.kwargs.get('username')) if not queryset: raise Http404(_('Username not found.')) return queryset
from django.http import Http404
from django.contrib.auth import get_user_model
from django.shortcuts import render, get_object_or_404
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        # Check to see if user exists. 404 if not.
        username = self.kwargs.get('username')
        user = get_object_or_404(get_user_model(), username=username)
        # Filter messages by the user as author.
        queryset = super().get_queryset()
        return queryset.filter(user=user)
Adjust user filtering logic to 404 only if user does not exist
Adjust user filtering logic to 404 only if user does not exist
Python
mit
pennomi/openwest2015-twitter-clone,pennomi/openwest2015-twitter-clone,pennomi/openwest2015-twitter-clone
from django.http import Http404 from django.shortcuts import render from django.utils.translation import ugettext as _ from django.views.generic import ListView from .models import Message class MessageList(ListView): template_name = "message_list.html" model = Message class MyMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() return queryset.filter(user=self.request.user) class FilteredMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() queryset = queryset.filter(user__username=self.kwargs.get('username')) if not queryset: raise Http404(_('Username not found.')) return queryset Adjust user filtering logic to 404 only if user does not exist
from django.http import Http404
from django.contrib.auth import get_user_model
from django.shortcuts import render, get_object_or_404
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        # Check to see if user exists. 404 if not.
        username = self.kwargs.get('username')
        user = get_object_or_404(get_user_model(), username=username)
        # Filter messages by the user as author.
        queryset = super().get_queryset()
        return queryset.filter(user=user)
<commit_before>from django.http import Http404 from django.shortcuts import render from django.utils.translation import ugettext as _ from django.views.generic import ListView from .models import Message class MessageList(ListView): template_name = "message_list.html" model = Message class MyMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() return queryset.filter(user=self.request.user) class FilteredMessageList(MessageList): def get_queryset(self): queryset = super().get_queryset() queryset = queryset.filter(user__username=self.kwargs.get('username')) if not queryset: raise Http404(_('Username not found.')) return queryset <commit_msg>Adjust user filtering logic to 404 only if user does not exist<commit_after>
from django.http import Http404
from django.contrib.auth import get_user_model
from django.shortcuts import render, get_object_or_404
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        # Check to see if user exists. 404 if not.
        username = self.kwargs.get('username')
        user = get_object_or_404(get_user_model(), username=username)
        # Filter messages by the user as author.
        queryset = super().get_queryset()
        return queryset.filter(user=user)
from django.http import Http404
from django.shortcuts import render
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(user__username=self.kwargs.get('username'))
        if not queryset:
            raise Http404(_('Username not found.'))
        return queryset
Adjust user filtering logic to 404 only if user does not existfrom django.http import Http404
from django.contrib.auth import get_user_model
from django.shortcuts import render, get_object_or_404
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        # Check to see if user exists. 404 if not.
        username = self.kwargs.get('username')
        user = get_object_or_404(get_user_model(), username=username)
        # Filter messages by the user as author.
        queryset = super().get_queryset()
        return queryset.filter(user=user)
<commit_before>from django.http import Http404
from django.shortcuts import render
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        queryset = queryset.filter(user__username=self.kwargs.get('username'))
        if not queryset:
            raise Http404(_('Username not found.'))
        return queryset
<commit_msg>Adjust user filtering logic to 404 only if user does not exist<commit_after>from django.http import Http404
from django.contrib.auth import get_user_model
from django.shortcuts import render, get_object_or_404
from django.utils.translation import ugettext as _
from django.views.generic import ListView

from .models import Message


class MessageList(ListView):
    template_name = "message_list.html"
    model = Message


class MyMessageList(MessageList):
    def get_queryset(self):
        queryset = super().get_queryset()
        return queryset.filter(user=self.request.user)


class FilteredMessageList(MessageList):
    def get_queryset(self):
        # Check to see if user exists. 404 if not.
        username = self.kwargs.get('username')
        user = get_object_or_404(get_user_model(), username=username)
        # Filter messages by the user as author.
        queryset = super().get_queryset()
        return queryset.filter(user=user)
7aeac3ee3429f5d98bc6fb6f475f0969ebabba1e
breakpad.py
breakpad.py
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys # Configure these values. DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad' def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), 'exception': last_tb, } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime.
Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime. Review URL: http://codereview.chromium.org/2109001 git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@47179 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
aleonliao/depot_tools,duanwujie/depot_tools,fanjunwei/depot_tools,michalliu/chromium-depot_tools,duongbaoduy/gtools,smikes/depot_tools,jankeromnes/depot_tools,disigma/depot_tools,primiano/depot_tools,Midrya/chromium,cybertk/depot_tools,jankeromnes/depot_tools,fracting/depot_tools,disigma/depot_tools,yetu/repotools,michalliu/chromium-depot_tools,mlufei/depot_tools,smikes/depot_tools,HackFisher/depot_tools,mlufei/depot_tools,hsharsha/depot_tools,michalliu/chromium-depot_tools,Neozaru/depot_tools,Chilledheart/depot_tools,michalliu/chromium-depot_tools,ajohnson23/depot_tools,gcodetogit/depot_tools,kaiix/depot_tools,kromain/chromium-tools,eatbyte/depot_tools,chinmaygarde/depot_tools,gcodetogit/depot_tools,sarvex/depot-tools,airtimemedia/depot_tools,cybertk/depot_tools,duongbaoduy/gtools,hsharsha/depot_tools,xuyuhan/depot_tools,smikes/depot_tools,npe9/depot_tools,Chilledheart/depot_tools,npe9/depot_tools,eatbyte/depot_tools,azunite/chrome_build,yetu/repotools,jankeromnes/depot_tools,kaiix/depot_tools,azureplus/chromium_depot_tools,npe9/depot_tools,azunite/chrome_build,cybertk/depot_tools,kromain/chromium-tools,fanjunwei/depot_tools,G-P-S/depot_tools,fracting/depot_tools,SuYiling/chrome_depot_tools,kaiix/depot_tools,chinmaygarde/depot_tools,liaorubei/depot_tools,smikes/depot_tools,xuyuhan/depot_tools,Phonebooth/depot_tools,withtone/depot_tools,G-P-S/depot_tools,jankeromnes/depot_tools,airtimemedia/depot_tools,coreos/depot_tools,kromain/chromium-tools,coreos/depot_tools,Phonebooth/depot_tools,smikes/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,mlufei/depot_tools,liaorubei/depot_tools,sarvex/depot-tools,aleonliao/depot_tools,withtone/depot_tools,duanwujie/depot_tools,Neozaru/depot_tools,duongbaoduy/gtools,coreos/depot_tools,HackFisher/depot_tools,npe9/depot_tools,eatbyte/depot_tools,kromain/chromium-tools,CoherentLabs/depot_tools,withtone/depot_tools,cybertk/depot_tools,jankeromnes/depot_tools,Midrya/chromium,azunite/chrome_build,liaorubei/depot_tools,primiano/depot_tools,hsharsha/depot_tools,fanjunwei/depot_tools,sarvex/depot-tools,airtimemedia/depot_tools,cybertk/depot_tools,azureplus/chromium_depot_tools,primiano/depot_tools,Neozaru/depot_tools,Neozaru/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,coreos/depot_tools,Phonebooth/depot_tools,ajohnson23/depot_tools,xuyuhan/depot_tools,jankeromnes/depot_tools,jankeromnes/depot_tools,liaorubei/depot_tools,Chilledheart/depot_tools,SuYiling/chrome_depot_tools,yetu/repotools,Chilledheart/depot_tools,G-P-S/depot_tools,azureplus/chromium_depot_tools,SuYiling/chrome_depot_tools,chinmaygarde/depot_tools,ajohnson23/depot_tools,Phonebooth/depot_tools,HackFisher/depot_tools,coreos/depot_tools,fanjunwei/depot_tools,coreos/depot_tools,HackFisher/depot_tools,duanwujie/depot_tools,xuyuhan/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,G-P-S/depot_tools,Neozaru/depot_tools,CoherentLabs/depot_tools,gcodetogit/depot_tools,Chilledheart/depot_tools,aleonliao/depot_tools,disigma/depot_tools,Midrya/chromium,airtimemedia/depot_tools,sarvex/depot-tools,eatbyte/depot_tools,fracting/depot_tools
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException) Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime. Review URL: http://codereview.chromium.org/2109001 git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@47179 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys # Configure these values. DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad' def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), 'exception': last_tb, } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException) <commit_msg>Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime. Review URL: http://codereview.chromium.org/2109001 git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@47179 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys # Configure these values. DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad' def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), 'exception': last_tb, } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException) Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime. Review URL: http://codereview.chromium.org/2109001 git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@47179 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys # Configure these values. DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad' def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), 'exception': last_tb, } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException) <commit_msg>Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime. Review URL: http://codereview.chromium.org/2109001 git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@47179 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Breakpad for Python. Sends a notification when a process stops on an exception.""" import atexit import getpass import urllib import traceback import socket import sys # Configure these values. DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad' def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), 'exception': last_tb, } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.') def CheckForException(): last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb))) if (not 'test' in sys.modules['__main__'].__file__ and socket.gethostname().endswith('.google.com')): # Skip unit tests and we don't want anything from non-googler. atexit.register(CheckForException)
89c1530882c67a135687df389d0a96d2283873c8
conda_smithy/tests/test_feedstock_io.py
conda_smithy/tests/test_feedstock_io.py
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
Add some tests for `get_repo`.
Add some tests for `get_repo`.
Python
bsd-3-clause
shadowwalkersb/conda-smithy,ocefpaf/conda-smithy,conda-forge/conda-smithy,ocefpaf/conda-smithy,conda-forge/conda-smithy,shadowwalkersb/conda-smithy
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main() Add some tests for `get_repo`.
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
<commit_before>from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main() <commit_msg>Add some tests for `get_repo`.<commit_after>
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main() Add some tests for `get_repo`.from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
<commit_before>from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import conda_smithy.feedstock_io as fio class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main() <commit_msg>Add some tests for `get_repo`.<commit_after>from __future__ import unicode_literals import io import os import shutil import tempfile import unittest import git import conda_smithy.feedstock_io as fio def keep_dir(dirname): keep_filename = os.path.join(dirname, ".keep") with io.open(keep_filename, "w", encoding = "utf-8") as fh: fh.write("") def parameterize(): for pathfunc in [ lambda pth, tmp_dir: os.path.relpath(pth, tmp_dir), lambda pth, tmp_dir: pth ]: for get_repo in [ lambda tmp_dir: None, lambda tmp_dir: git.Repo.init(tmp_dir) ]: try: tmp_dir = tempfile.mkdtemp() keep_dir(tmp_dir) old_dir = os.getcwd() os.chdir(tmp_dir) yield ( tmp_dir, get_repo(tmp_dir), lambda pth: pathfunc(pth, tmp_dir) ) finally: os.chdir(old_dir) shutil.rmtree(tmp_dir) class TestFeedstockIO(unittest.TestCase): def setUp(self): self.old_dir = os.getcwd() self.tmp_dir = tempfile.mkdtemp() os.chdir(self.tmp_dir) with io.open(os.path.abspath(".keep"), "w", encoding="utf-8") as fh: fh.write("") def test_repo(self): for tmp_dir, repo, pathfunc in parameterize(): if repo is None: self.assertTrue( fio.get_repo(pathfunc(tmp_dir)) is None ) else: self.assertIsInstance( fio.get_repo(pathfunc(tmp_dir)), git.Repo ) def tearDown(self): os.chdir(self.old_dir) del self.old_dir shutil.rmtree(self.tmp_dir) del self.tmp_dir if __name__ == '__main__': unittest.main()
c1403cc8beac2a9142a21c2de555eb9e5e090f9b
python_hosts/__init__.py
python_hosts/__init__.py
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidComment) from python_hosts.hosts import Hosts from python_hosts.utils import is_readable, is_ipv4, is_ipv6
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.hosts import Hosts, HostsEntry from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidIPv6Address, InvalidComment)
Revert "refactor: remove unused imports."
Revert "refactor: remove unused imports." This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.
Python
mit
jonhadfield/python-hosts
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidComment) from python_hosts.hosts import Hosts from python_hosts.utils import is_readable, is_ipv4, is_ipv6 Revert "refactor: remove unused imports." This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.hosts import Hosts, HostsEntry from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidIPv6Address, InvalidComment)
<commit_before># -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidComment) from python_hosts.hosts import Hosts from python_hosts.utils import is_readable, is_ipv4, is_ipv6 <commit_msg>Revert "refactor: remove unused imports." This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.<commit_after>
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.hosts import Hosts, HostsEntry from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidIPv6Address, InvalidComment)
# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidComment) from python_hosts.hosts import Hosts from python_hosts.utils import is_readable, is_ipv4, is_ipv6 Revert "refactor: remove unused imports." This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.# -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.hosts import Hosts, HostsEntry from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidIPv6Address, InvalidComment)
<commit_before># -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidComment) from python_hosts.hosts import Hosts from python_hosts.utils import is_readable, is_ipv4, is_ipv6 <commit_msg>Revert "refactor: remove unused imports." This reverts commit ceb37d26a48d7d56b3d0ded0376eb9498ce7ef6f.<commit_after># -*- coding: utf-8 -*- """ This package contains all of the modules utilised by the python-hosts library. hosts: Contains the Hosts and HostsEntry classes that represent instances of a hosts file and it's individual lines/entries utils: Contains helper functions to check the available operations on a hosts file and the validity of a hosts file entry exception: Contains the custom exceptions that are raised in the event of an error in processing a hosts file and its entries """ from python_hosts.hosts import Hosts, HostsEntry from python_hosts.utils import is_readable, is_ipv4, is_ipv6, valid_hostnames from python_hosts.exception import (HostsException, HostsEntryException, InvalidIPv4Address, InvalidIPv6Address, InvalidComment)
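The revert above matters to callers because it restores HostsEntry and valid_hostnames to the package namespace. A sketch of that import surface in use, assuming the python_hosts API of this era (Hosts, HostsEntry, add, write); it writes to a throwaway file rather than /etc/hosts:

import tempfile
from python_hosts import Hosts, HostsEntry

tmp = tempfile.NamedTemporaryFile(delete=False)
tmp.close()                                        # start from an empty hosts file
hosts = Hosts(path=tmp.name)
entry = HostsEntry(entry_type='ipv4', address='192.0.2.10', names=['demo.local'])
hosts.add([entry])                                 # stage the new ipv4 entry
hosts.write()                                      # flush it back to tmp.name
print(open(tmp.name).read())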
76458ead1675025e3fbe3bed77b64466d4bbd079
devicehive/transports/base_transport.py
devicehive/transports/base_transport.py
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_obj(self, obj, receive_obj=True, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
Replace send_request and request methods with send_obj
Replace send_request and request methods with send_obj
Python
apache-2.0
devicehive/devicehive-python
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError Replace send_request and request methods with send_obj
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_obj(self, obj, receive_obj=True, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
<commit_before>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError <commit_msg>Replace send_request and request methods with send_obj<commit_after>
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_obj(self, obj, receive_obj=True, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError Replace send_request and request methods with send_objclass BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_obj(self, obj, receive_obj=True, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
<commit_before>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_request(self, obj, **params): raise NotImplementedError def request(self, obj, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError <commit_msg>Replace send_request and request methods with send_obj<commit_after>class BaseTransport(object): """Base transport class.""" def __init__(self, name, data_format_class, data_format_options, handler_class, handler_options): self.name = name self._data_format = data_format_class(**data_format_options) self._data_type = self._data_format.data_type self._handler = handler_class(self, **handler_options) self._connected = False def _assert_not_connected(self): assert not self._connected, 'transport connection already created' def _assert_connected(self): assert self._connected, 'transport connection has not created' def _encode_obj(self, obj): return self._data_format.encode(obj) def _decode_data(self, data): return self._data_format.decode(data) def _call_handler_method(self, name, *args): getattr(self._handler, name)(*args) def is_connected(self): return self._connected def connect(self, url, **options): raise NotImplementedError def send_obj(self, obj, receive_obj=True, **params): raise NotImplementedError def close(self): raise NotImplementedError def join(self, timeout=None): raise NotImplementedError
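The new send_obj contract folds the old send_request/request pair into a single call that optionally waits for a reply. A hypothetical in-memory subclass, assuming the BaseTransport class from the commit above is in scope; JsonFormat and EchoHandler are stand-ins, not devicehive classes:

import json

class JsonFormat(object):
    # Stand-in data format satisfying what BaseTransport expects:
    # a data_type attribute plus encode/decode methods.
    data_type = 'text'

    def encode(self, obj):
        return json.dumps(obj)

    def decode(self, data):
        return json.loads(data)

class EchoHandler(object):
    # Stand-in handler; BaseTransport only requires the constructor shape.
    def __init__(self, transport):
        self._transport = transport

class LoopbackTransport(BaseTransport):
    def connect(self, url, **options):
        self._assert_not_connected()
        self._connected = True

    def send_obj(self, obj, receive_obj=True, **params):
        self._assert_connected()
        data = self._encode_obj(obj)        # serialize with the data format
        if receive_obj:
            return self._decode_data(data)  # echo the decoded object back

    def close(self):
        self._connected = False

    def join(self, timeout=None):
        pass

transport = LoopbackTransport('loopback', JsonFormat, {}, EchoHandler, {})
transport.connect('mem://')
assert transport.send_obj({'action': 'ping'}) == {'action': 'ping'}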
24c2b5fe959c66113ef66152c39810a1db0c0cf4
dhcpcanon/conflog.py
dhcpcanon/conflog.py
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s'\ '%(module)s[%(process)d.]'\ ' %(filename)s:%(lineno)s -'\ ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'formsys': { 'format': "%(asctime)s %(module)s[%(process)s.%(thread)s]: %(message)s", 'datefmt': "%b %d %H:%M:%S" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'formsys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, } }
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s' ' %(filename)s:%(lineno)s -' ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'simple': { 'format': "%(message)s", }, 'sys': { 'format': "%(module)s[%(process)s]: " "%(message)s" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'stdoutscapy': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'simple', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'sys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, "scapy.interactive": { 'handlers': ['stdoutscapy'], 'level': logging.INFO, 'propagate': False } } }
Add logger for scapy Automaton
Add logger for scapy Automaton
Python
mit
DHCPAP/dhcpcanon,juxor/dhcpcanon_debian,juga0/dhcpcanon,DHCPAP/dhcpcanon,juxor/dhcpcanon_debian,juga0/dhcpcanon
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s'\ '%(module)s[%(process)d.]'\ ' %(filename)s:%(lineno)s -'\ ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'formsys': { 'format': "%(asctime)s %(module)s[%(process)s.%(thread)s]: %(message)s", 'datefmt': "%b %d %H:%M:%S" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'formsys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, } } Add logger for scapy Automaton
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s' ' %(filename)s:%(lineno)s -' ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'simple': { 'format': "%(message)s", }, 'sys': { 'format': "%(module)s[%(process)s]: " "%(message)s" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'stdoutscapy': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'simple', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'sys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, "scapy.interactive": { 'handlers': ['stdoutscapy'], 'level': logging.INFO, 'propagate': False } } }
<commit_before> import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s'\ '%(module)s[%(process)d.]'\ ' %(filename)s:%(lineno)s -'\ ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'formsys': { 'format': "%(asctime)s %(module)s[%(process)s.%(thread)s]: %(message)s", 'datefmt': "%b %d %H:%M:%S" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'formsys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, } } <commit_msg>Add logger for scapy Automaton<commit_after>
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s' ' %(filename)s:%(lineno)s -' ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'simple': { 'format': "%(message)s", }, 'sys': { 'format': "%(module)s[%(process)s]: " "%(message)s" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'stdoutscapy': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'simple', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'sys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, "scapy.interactive": { 'handlers': ['stdoutscapy'], 'level': logging.INFO, 'propagate': False } } }
import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s'\ '%(module)s[%(process)d.]'\ ' %(filename)s:%(lineno)s -'\ ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'formsys': { 'format': "%(asctime)s %(module)s[%(process)s.%(thread)s]: %(message)s", 'datefmt': "%b %d %H:%M:%S" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'formsys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, } } Add logger for scapy Automaton import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s' ' %(filename)s:%(lineno)s -' ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'simple': { 'format': "%(message)s", }, 'sys': { 'format': "%(module)s[%(process)s]: " "%(message)s" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'stdoutscapy': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'simple', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'sys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, "scapy.interactive": { 'handlers': ['stdoutscapy'], 'level': logging.INFO, 'propagate': False } } }
<commit_before> import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s'\ '%(module)s[%(process)d.]'\ ' %(filename)s:%(lineno)s -'\ ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'formsys': { 'format': "%(asctime)s %(module)s[%(process)s.%(thread)s]: %(message)s", 'datefmt': "%b %d %H:%M:%S" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'formsys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, } } <commit_msg>Add logger for scapy Automaton<commit_after> import logging import sys LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'verbose': { 'format': '%(asctime)s %(levelname)s' ' %(filename)s:%(lineno)s -' ' %(funcName)s - %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S' }, 'simple': { 'format': "%(message)s", }, 'sys': { 'format': "%(module)s[%(process)s]: " "%(message)s" } }, 'handlers': { 'stdout': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'verbose', 'level': 'DEBUG', }, 'stdoutscapy': { 'class': 'logging.StreamHandler', 'stream': sys.stdout, 'formatter': 'simple', 'level': 'DEBUG', }, 'syslog': { 'class': 'logging.handlers.SysLogHandler', 'address': '/dev/log', 'formatter': 'sys', 'level': 'INFO', }, }, 'loggers': { 'dhcpcanon': { 'handlers': ['syslog', 'stdout'], 'level': logging.INFO, 'propagate': False }, "scapy.interactive": { 'handlers': ['stdoutscapy'], 'level': logging.INFO, 'propagate': False } } }
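Applying a dict like the one above is a one-liner with logging.config.dictConfig. The sketch assumes a Linux host, since the syslog handler opens /dev/log when the configuration is loaded, and that LOGGING names the dict defined in this commit:

import logging
import logging.config

logging.config.dictConfig(LOGGING)   # LOGGING is the dict from the commit above

logging.getLogger('dhcpcanon').info('lease bound')             # verbose + syslog
logging.getLogger('scapy.interactive').info('state DISCOVER')  # bare message only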
48d699fb7d1341dad182412dadd19ea9ee661b30
localtv/management/commands/update_original_data.py
localtv/management/commands/update_original_data.py
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except Exception: traceback.print_exc()
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models import vidscraper.errors class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except vidscraper.errors.CantIdentifyUrl, e: pass # It is okay if we cannot update a remote video. No need to be noisy. except Exception: traceback.print_exc()
Add a filter for CantIdentifyUrl errors.
Add a filter for CantIdentifyUrl errors.
Python
agpl-3.0
pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except Exception: traceback.print_exc() Add a filter for CantIdentifyUrl errors.
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models import vidscraper.errors class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except vidscraper.errors.CantIdentifyUrl, e: pass # It is okay if we cannot update a remote video. No need to be noisy. except Exception: traceback.print_exc()
<commit_before>import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except Exception: traceback.print_exc() <commit_msg>Add a filter for CantIdentifyUrl errors.<commit_after>
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models import vidscraper.errors class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except vidscraper.errors.CantIdentifyUrl, e: pass # It is okay if we cannot update a remote video. No need to be noisy. except Exception: traceback.print_exc()
import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except Exception: traceback.print_exc() Add a filter for CantIdentifyUrl errors.import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models import vidscraper.errors class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except vidscraper.errors.CantIdentifyUrl, e: pass # It is okay if we cannot update a remote video. No need to be noisy. except Exception: traceback.print_exc()
<commit_before>import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except Exception: traceback.print_exc() <commit_msg>Add a filter for CantIdentifyUrl errors.<commit_after>import traceback from django.core.management.base import NoArgsCommand from localtv.management import site_too_old from localtv import models import vidscraper.errors class Command(NoArgsCommand): args = '' def handle_noargs(self, **options): if site_too_old(): return for original in models.OriginalVideo.objects.exclude( video__status=models.FEED_STATUS_REJECTED): try: original.update() except vidscraper.errors.CantIdentifyUrl, e: pass # It is okay if we cannot update a remote video. No need to be noisy. except Exception: traceback.print_exc()
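The pattern in this commit, catching the one expected error class quietly while keeping the traceback for everything else, is easy to check without localtv or vidscraper. A stand-alone sketch in current except-as syntax; UpdateError and the items list are hypothetical:

import traceback

class UpdateError(Exception):
    """Raised when a remote record can no longer be identified."""

def update(item):
    if item == 'gone':
        raise UpdateError(item)
    if item == 'broken':
        raise ValueError(item)

for item in ['ok', 'gone', 'broken']:
    try:
        update(item)
    except UpdateError:
        pass                      # expected and harmless: stay quiet
    except Exception:
        traceback.print_exc()     # anything else still deserves a traceback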
b7acb1a7372c7b6c39c0b5bbfe61fb8e886ed5bc
fulfil_client/client.py
fulfil_client/client.py
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content)
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) def create(self, data): response = self.client.session.post( self.path, data=dumps(data) ) return loads(response.content)
Add method to create resource
Add method to create resource
Python
isc
fulfilio/fulfil-python-api,sharoonthomas/fulfil-python-api
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) Add method to create resource
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) def create(self, data): response = self.client.session.post( self.path, data=dumps(data) ) return loads(response.content)
<commit_before>import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) <commit_msg>Add method to create resource<commit_after>
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) def create(self, data): response = self.client.session.post( self.path, data=dumps(data) ) return loads(response.content)
import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) Add method to create resourceimport json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) def create(self, data): response = self.client.session.post( self.path, data=dumps(data) ) return loads(response.content)
<commit_before>import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) <commit_msg>Add method to create resource<commit_after>import json import requests from functools import partial from trytond.protocols.jsonrpc import JSONDecoder, JSONEncoder dumps = partial(json.dumps, cls=JSONEncoder) loads = partial(json.loads, object_hook=JSONDecoder()) class Client(object): def __init__(self, subdomain, api_key): self.subdomain = subdomain self.api_key = api_key self.base_url = 'https://%s.fulfil.io/api/v1' % self.subdomain self.session = requests.Session() self.session.headers.update({'x-api-key': api_key}) def model(self, name): return Model(self, name) class Model(object): def __init__(self, client, model_name): self.client = client self.model_name = model_name @property def path(self): return '%s/model/%s' % (self.client.base_url, self.model_name) def get(self, id): return loads( self.client.session.get( self.path + '/%d' % id ).content ) def search(self, filter, page=1, per_page=10, fields=None): response = self.client.session.get( self.path, params={ 'filter': dumps(filter or []), 'page': page, 'per_page': per_page, 'field': fields, } ) return loads(response.content) def create(self, data): response = self.client.session.post( self.path, data=dumps(data) ) return loads(response.content)
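The create() method added above simply POSTs the encoded payload, so the exact body shape is dictated by the Fulfil server rather than the client. An illustrative call, assuming the Client class above is in scope and that the endpoint accepts a list of mappings as Tryton models usually do; the subdomain, key, model name, and field values are all placeholders:

client = Client('demo', 'YOUR-API-KEY')          # placeholder subdomain and key
Party = client.model('party.party')              # hypothetical model name

created = Party.create([{'name': 'Jane Doe'}])   # POST .../model/party.party
print(created)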
d31382c666444c4947ca35bb67ddb851236e2e49
automata/automaton.py
automata/automaton.py
#!/usr/bin/env python3 import abc class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass
#!/usr/bin/env python3 import abc class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass
Move Automaton class above exception classes
Move Automaton class above exception classes
Python
mit
caleb531/automata
#!/usr/bin/env python3 import abc class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass Move Automaton class above exception classes
#!/usr/bin/env python3 import abc class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass
<commit_before>#!/usr/bin/env python3 import abc class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass <commit_msg>Move Automaton class above exception classes<commit_after>
#!/usr/bin/env python3 import abc class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass
#!/usr/bin/env python3 import abc class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass Move Automaton class above exception classes#!/usr/bin/env python3 import abc class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass
<commit_before>#!/usr/bin/env python3 import abc class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass <commit_msg>Move Automaton class above exception classes<commit_after>#!/usr/bin/env python3 import abc class Automaton(metaclass=abc.ABCMeta): def __init__(self, states, symbols, transitions, initial_state, final_states): """initialize a complete finite automaton""" self.states = states self.symbols = symbols self.transitions = transitions self.initial_state = initial_state self.final_states = final_states self.validate_automaton() @abc.abstractmethod def validate_input(self): pass @abc.abstractmethod def validate_automaton(self): pass class AutomatonError(Exception): """the base class for all automaton-related errors""" pass class InvalidStateError(AutomatonError): """a state is not a valid state for this automaton""" pass class InvalidSymbolError(AutomatonError): """a symbol is not a valid symbol for this automaton""" pass class MissingStateError(AutomatonError): """a state is missing from the transition function""" pass class MissingSymbolError(AutomatonError): """a symbol is missing from the transition function""" pass class FinalStateError(AutomatonError): """the automaton stopped at a non-final state""" pass
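Reordering aside, the module's shape is an abstract base plus an exception hierarchy that subclasses are expected to raise from the two hooks. A hypothetical concrete DFA, not part of the original module, showing how the pieces fit; it accepts words over {'a'} of even length:

class EvenA(Automaton):
    # Illustrative subclass: accepts strings of 'a' whose length is even.

    def validate_automaton(self):
        if self.initial_state not in self.states:
            raise InvalidStateError('unknown initial state')

    def validate_input(self, word):
        state = self.initial_state
        for symbol in word:
            if symbol not in self.symbols:
                raise InvalidSymbolError(symbol)
            state = self.transitions[state][symbol]
        if state not in self.final_states:
            raise FinalStateError(state)
        return state

dfa = EvenA(states={'q0', 'q1'}, symbols={'a'},
            transitions={'q0': {'a': 'q1'}, 'q1': {'a': 'q0'}},
            initial_state='q0', final_states={'q0'})
assert dfa.validate_input('aa') == 'q0'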
d42b826ea6105956511cfb8f5e8d13b61f7c8033
Ratings-Counter.py
Ratings-Counter.py
from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
# import os # import sys # # # Path for spark source folder # os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1" # # # Append pyspark to Python Path # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python") # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip") from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
Test to make file run in IDE
Test to make file run in IDE
Python
mit
tonirilix/apache-spark-hands-on
from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value) Test to make file run in IDE
# import os # import sys # # # Path for spark source folder # os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1" # # # Append pyspark to Python Path # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python") # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip") from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
<commit_before>from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value) <commit_msg>Test to make file run in IDE<commit_after>
# import os # import sys # # # Path for spark source folder # os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1" # # # Append pyspark to Python Path # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python") # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip") from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value) Test to make file run in IDE# import os # import sys # # # Path for spark source folder # os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1" # # # Append pyspark to Python Path # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python") # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip") from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
<commit_before>from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value) <commit_msg>Test to make file run in IDE<commit_after># import os # import sys # # # Path for spark source folder # os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1" # # # Append pyspark to Python Path # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python") # sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip") from pyspark import SparkConf, SparkContext import collections conf = SparkConf().setMaster("local").setAppName("RatingsHistogram") sc = SparkContext(conf = conf) lines = sc.textFile("ml-100k/u.data") ratings = lines.map(lambda x: x.split()[2]) result = ratings.countByValue() sortedResults = collections.OrderedDict(sorted(result.items())) for key, value in sortedResults.iteritems(): print "%s %i" % (key, value)
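The block this commit comments out is the usual shim for running PySpark scripts directly from an IDE: point SPARK_HOME at the Spark install and put its python/ directory and bundled py4j zip on sys.path before importing pyspark. Enabled, it reads as below; the Homebrew paths come from the record itself and will differ per machine:

import os
import sys

os.environ['SPARK_HOME'] = "/usr/local/Cellar/apache-spark/1.6.1"
sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python")
sys.path.append("/usr/local/Cellar/apache-spark/1.6.1/libexec/python/lib/py4j-0.9-src.zip")

from pyspark import SparkConf, SparkContext  # now resolvable inside the IDE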
a4b669b2c0bfad0859830ccaf4a1302f6e3c9c6c
core/geoloc.py
core/geoloc.py
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc = self.getLocationAPI(ip) geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
Fix : forgot to make the actual method call when reversing an IP.
Fix : forgot to make the actual method call when reversing an IP.
Python
mit
nocternology/fail2dash,nocternology/fail2dash
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']} Fix : forgot to make the actual method call when reversing an IP.
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc = self.getLocationAPI(ip) geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
<commit_before>import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']} <commit_msg>Fix : forgot to make the actual method call when reversing an IP.<commit_after>
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc = self.getLocationAPI(ip) geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']} Fix : forgot to make the actual method call when reversing an IP.import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc = self.getLocationAPI(ip) geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
<commit_before>import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']} <commit_msg>Fix : forgot to make the actual method call when reversing an IP.<commit_after>import urllib import json class Geoloc(object): """ Geoloc class definition. Given an IP adress, this object will try to reverse identify the IP using a geolocalisation API. On the return, it will spit back a list with : * IP adress, * Longitude, * Latitude, * Country, * Country flag """ def __init__(self, config): """ Inits the object by registering the configuration object """ self.config = config def get(self, ip): """ Metamethod that returns the full geoloc information for a given IP adress """ geoloc = self.getLocationAPI(ip) geoloc["ip"] = ip return geoloc def getLocationAPI(self, ip): """ Makes the actual call to the external API for IP geolookup """ try: response = urllib.urlopen(config.api_endpoint % ip) info = response.read() except Exception as e: # TODO : Add some kind of logging here if config.api_parser == "json": # Just in case you use an XML API or whatever result = self.parseJSON(info) # TODO : Get country flag from a local CSS/SVG # (see : https://github.com/lipis/flag-icon-css) def parseJSON(self, info): """ Gets a JSON message and parse it to keep only the relevant parts for us """ parsed = json.loads(info) return {'lat': parsed['latitude'], 'lon': parsed['longitude'], 'country': parsed['country'], 'code': parsed['country_code3']}
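Even after this commit the recorded module cannot be imported: the except clause contains only a comment (Python requires at least one statement in the block), getLocationAPI reads a module-level name config instead of self.config, and the method never returns result. A minimal corrected sketch of that one method, keeping the record's names and its Python 2 urllib.urlopen call (the api_endpoint and api_parser attributes are assumed from how the record uses them):

    def getLocationAPI(self, ip):
        """
        Makes the actual call to the external API for IP geolookup
        """
        try:
            response = urllib.urlopen(self.config.api_endpoint % ip)
            info = response.read()
        except Exception:
            return None  # TODO: log the failure instead of swallowing it
        if self.config.api_parser == "json":
            return self.parseJSON(info)
        return None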
393819e9c81dd9b247553be4499216805e73eb23
stock_orderpoint_move_link/models/procurement_rule.py
stock_orderpoint_move_link/models/procurement_rule.py
# Copyright 2017 Eficent Business and IT Consulting Services, S.L. # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class ProcurementRule(models.Model): _inherit = 'procurement.rule' def _get_stock_move_values(self, product_id, product_qty, product_uom, location_id, name, origin, values, group_id): vals = super(ProcurementRule, self)._get_stock_move_values( product_id, product_qty, product_uom, location_id, name, origin, values, group_id) if 'orderpoint_id' in values: vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)] elif 'orderpoint_ids' in values: vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']] return vals
# Copyright 2017 Eficent Business and IT Consulting Services, S.L. # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class ProcurementRule(models.Model): _inherit = 'procurement.rule' def _get_stock_move_values(self, product_id, product_qty, product_uom, location_id, name, origin, values, group_id): vals = super(ProcurementRule, self)._get_stock_move_values( product_id, product_qty, product_uom, location_id, name, origin, values, group_id) if 'orderpoint_id' in values: vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)] elif 'orderpoint_ids' in values: vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']] return vals
Fix read of wrong dictionary
Fix read of wrong dictionary
Python
agpl-3.0
OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']]
        return vals
Fix read of wrong dictionary
# Copyright 2017 Eficent Business and IT Consulting Services, S.L. # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class ProcurementRule(models.Model): _inherit = 'procurement.rule' def _get_stock_move_values(self, product_id, product_qty, product_uom, location_id, name, origin, values, group_id): vals = super(ProcurementRule, self)._get_stock_move_values( product_id, product_qty, product_uom, location_id, name, origin, values, group_id) if 'orderpoint_id' in values: vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)] elif 'orderpoint_ids' in values: vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']] return vals
<commit_before># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']]
        return vals
<commit_msg>Fix read of wrong dictionary<commit_after>
# Copyright 2017 Eficent Business and IT Consulting Services, S.L. # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html). from odoo import models class ProcurementRule(models.Model): _inherit = 'procurement.rule' def _get_stock_move_values(self, product_id, product_qty, product_uom, location_id, name, origin, values, group_id): vals = super(ProcurementRule, self)._get_stock_move_values( product_id, product_qty, product_uom, location_id, name, origin, values, group_id) if 'orderpoint_id' in values: vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)] elif 'orderpoint_ids' in values: vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']] return vals
# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']]
        return vals
Fix read of wrong dictionary# Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']]
        return vals
<commit_before># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']]
        return vals
<commit_msg>Fix read of wrong dictionary<commit_after># Copyright 2017 Eficent Business and IT Consulting Services, S.L.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).

from odoo import models


class ProcurementRule(models.Model):
    _inherit = 'procurement.rule'

    def _get_stock_move_values(self, product_id, product_qty, product_uom,
                               location_id, name, origin, values, group_id):
        vals = super(ProcurementRule, self)._get_stock_move_values(
            product_id, product_qty, product_uom, location_id, name, origin,
            values, group_id)
        if 'orderpoint_id' in values:
            vals['orderpoint_ids'] = [(4, values['orderpoint_id'].id)]
        elif 'orderpoint_ids' in values:
            vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']]
        return vals
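The one-character diff in this record matters because the comprehension iterated over the dict being built (vals) rather than over the incoming procurement values, so the elif branch raised KeyError before vals['orderpoint_ids'] had been assigned. A standalone reproduction, with a plain class standing in for an Odoo recordset ((4, id) is Odoo's "link existing record" command for x2many fields):

class FakeOrderpoint(object):
    def __init__(self, id):
        self.id = id

values = {'orderpoint_ids': [FakeOrderpoint(7), FakeOrderpoint(9)]}
vals = {}
# buggy version: vals['orderpoint_ids'] does not exist yet
# vals['orderpoint_ids'] = [(4, o.id) for o in vals['orderpoint_ids']]  # KeyError
vals['orderpoint_ids'] = [(4, o.id) for o in values['orderpoint_ids']]
print(vals['orderpoint_ids'])  # [(4, 7), (4, 9)]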
04bf65fa025902f92cd80b87f95c276e32487af0
tensorflow_datasets/image/binary_alpha_digits_test.py
tensorflow_datasets/image/binary_alpha_digits_test.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": "binaryalphadigs.mat", } if __name__ == "__main__": tfds_test.test_main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": tf.compat.as_text("binaryalphadigs.mat"), } if __name__ == "__main__": tfds_test.test_main()
Fix in test file for Binary Alpha Digit Dataset Issue-189
Fix in test file for Binary Alpha Digit Dataset Issue-189
Python
apache-2.0
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": "binaryalphadigs.mat", } if __name__ == "__main__": tfds_test.test_main() Fix in test file for Binary Alpha Digit Dataset Issue-189
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": tf.compat.as_text("binaryalphadigs.mat"), } if __name__ == "__main__": tfds_test.test_main()
<commit_before>from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": "binaryalphadigs.mat", } if __name__ == "__main__": tfds_test.test_main() <commit_msg>Fix in test file for Binary Alpha Digit Dataset Issue-189<commit_after>
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": tf.compat.as_text("binaryalphadigs.mat"), } if __name__ == "__main__": tfds_test.test_main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": "binaryalphadigs.mat", } if __name__ == "__main__": tfds_test.test_main() Fix in test file for Binary Alpha Digit Dataset Issue-189from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": tf.compat.as_text("binaryalphadigs.mat"), } if __name__ == "__main__": tfds_test.test_main()
<commit_before>from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": "binaryalphadigs.mat", } if __name__ == "__main__": tfds_test.test_main() <commit_msg>Fix in test file for Binary Alpha Digit Dataset Issue-189<commit_after>from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tensorflow_datasets.image import binary_alpha_digits import tensorflow_datasets.testing as tfds_test class BinaryAlphaDigitsTest(tfds_test.DatasetBuilderTestCase): DATASET_CLASS = binary_alpha_digits.BinaryAlphaDigits SPLITS = { "train": 2, } DL_EXTRACT_RESULT = { "train": tf.compat.as_text("binaryalphadigs.mat"), } if __name__ == "__main__": tfds_test.test_main()
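tf.compat.as_text() returns a unicode string on both Python 2 and 3, decoding bytes as UTF-8 and passing text through unchanged, which is presumably what the test harness needs when comparing expected extract results. A quick illustration:

import tensorflow as tf

print(tf.compat.as_text(b"binaryalphadigs.mat"))  # binaryalphadigs.mat
print(tf.compat.as_text("binaryalphadigs.mat"))   # unchanged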
d9e9f8f1968ecc62a22b53dc58367cd8698b8bdb
project_generator/util.py
project_generator/util.py
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(l): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, l, ([], set()))[0] def flatten_list(l): all_items = [item if len(item) > 1 else sublist for sublist in l for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(_list): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, _list, ([], set()))[0] def flatten_list(_list): all_items = [item if len(item) > 1 else sublist for sublist in _list for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
Remove all traces of ls.
Remove all traces of ls.
Python
apache-2.0
0xc0170/project_generator,sarahmarshy/project_generator,project-generator/project_generator,hwfwgrp/project_generator,ohagendorf/project_generator,molejar/project_generator
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(l): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, l, ([], set()))[0] def flatten_list(l): all_items = [item if len(item) > 1 else sublist for sublist in l for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8' Remove all traces of ls.
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(_list): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, _list, ([], set()))[0] def flatten_list(_list): all_items = [item if len(item) > 1 else sublist for sublist in _list for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
<commit_before># Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(l): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, l, ([], set()))[0] def flatten_list(l): all_items = [item if len(item) > 1 else sublist for sublist in l for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8' <commit_msg>Remove all traces of ls.<commit_after>
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(_list): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, _list, ([], set()))[0] def flatten_list(_list): all_items = [item if len(item) > 1 else sublist for sublist in _list for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(l): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, l, ([], set()))[0] def flatten_list(l): all_items = [item if len(item) > 1 else sublist for sublist in l for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8' Remove all traces of ls.# Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(_list): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, _list, ([], set()))[0] def flatten_list(_list): all_items = [item if len(item) > 1 else sublist for sublist in _list for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
<commit_before># Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(l): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, l, ([], set()))[0] def flatten_list(l): all_items = [item if len(item) > 1 else sublist for sublist in l for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8' <commit_msg>Remove all traces of ls.<commit_after># Copyright 2014-2015 0xc0170 # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import shutil import locale def rmtree_if_exists(directory): if os.path.exists(directory): shutil.rmtree(directory) def uniqify(_list): # see: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order/29898968#29898968 reduce(lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r, _list, ([], set()))[0] def flatten_list(_list): all_items = [item if len(item) > 1 else sublist for sublist in _list for item in sublist] return uniqify(all_items) def unicode_available(): return locale.getdefaultlocale()[1] == 'UTF-8'
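Beyond the rename (a lone l is easily confused with 1, which is why style checkers flag it), note that uniqify() as recorded builds the deduplicated list but never returns it, so flatten_list() always returns None. The same reduce one-liner with the missing return added (reduce is a builtin on the Python 2 this module targets; Python 3 would need from functools import reduce):

def uniqify(_list):
    return reduce(
        lambda r, v: v in r[1] and r or (r[0].append(v) or r[1].add(v)) or r,
        _list, ([], set()))[0]

print(uniqify([3, 1, 3, 2, 1]))  # [3, 1, 2]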
27f723226b2eca8cbfd4161d7993ebd78d329451
workshopvenues/venues/tests.py
workshopvenues/venues/tests.py
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0)
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address, Venue class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) def test_create_venue(self): # Create the address first a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() # Create the venue v = Venue() v.name = 'Venue Test' v.website = 'www.myvenue.com' v.address = a v.save() self.assertEqual(v.address.town, 'London') self.assertTrue(v.id >= 0)
Add Venue model creation test case
Add Venue model creation test case
Python
bsd-3-clause
andreagrandi/workshopvenues
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) Add Venue model creation test case
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address, Venue class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) def test_create_venue(self): # Create the address first a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() # Create the venue v = Venue() v.name = 'Venue Test' v.website = 'www.myvenue.com' v.address = a v.save() self.assertEqual(v.address.town, 'London') self.assertTrue(v.id >= 0)
<commit_before>""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) <commit_msg>Add Venue model creation test case<commit_after>
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address, Venue class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) def test_create_venue(self): # Create the address first a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() # Create the venue v = Venue() v.name = 'Venue Test' v.website = 'www.myvenue.com' v.address = a v.save() self.assertEqual(v.address.town, 'London') self.assertTrue(v.id >= 0)
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) Add Venue model creation test case""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address, Venue class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) def test_create_venue(self): # Create the address first a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() # Create the venue v = Venue() v.name = 'Venue Test' v.website = 'www.myvenue.com' v.address = a v.save() self.assertEqual(v.address.town, 'London') self.assertTrue(v.id >= 0)
<commit_before>""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) <commit_msg>Add Venue model creation test case<commit_after>""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from .models import Address, Venue class ModelsTest(TestCase): def test_create_address(self): a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() self.assertTrue(a.id >= 0) def test_create_venue(self): # Create the address first a = Address() a.street = '23, Test Street' a.town = 'London' a.postcode = 'xxxxx' a.country = 'UK' a.save() # Create the venue v = Venue() v.name = 'Venue Test' v.website = 'www.myvenue.com' v.address = a v.save() self.assertEqual(v.address.town, 'London') self.assertTrue(v.id >= 0)
aeb68225cc9c999b51b1733bffaf684280044c97
salt/utils/yamldumper.py
salt/utils/yamldumper.py
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' # pylint: disable=W0232 # class has no __init__ method from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
Disable W0232, no `__init__` method.
Disable W0232, no `__init__` method.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict) Disable W0232, no `__init__` method.
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' # pylint: disable=W0232 # class has no __init__ method from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
<commit_before># -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict) <commit_msg>Disable W0232, no `__init__` method.<commit_after>
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' # pylint: disable=W0232 # class has no __init__ method from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict) Disable W0232, no `__init__` method.# -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' # pylint: disable=W0232 # class has no __init__ method from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
<commit_before># -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict) <commit_msg>Disable W0232, no `__init__` method.<commit_after># -*- coding: utf-8 -*- ''' salt.utils.yamldumper ~~~~~~~~~~~~~~~~~~~~~ ''' # pylint: disable=W0232 # class has no __init__ method from __future__ import absolute_import try: from yaml import CDumper as Dumper from yaml import CSafeDumper as SafeDumper except ImportError: from yaml import Dumper from yaml import SafeDumper from salt.utils.odict import OrderedDict class OrderedDumper(Dumper): ''' A YAML dumper that represents python OrderedDict as simple YAML map. ''' class SafeOrderedDumper(SafeDumper): ''' A YAML safe dumper that represents python OrderedDict as simple YAML map. ''' def represent_ordereddict(dumper, data): return dumper.represent_dict(data.items()) OrderedDumper.add_representer(OrderedDict, represent_ordereddict) SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
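W0232 is pylint's "class has no __init__ method" warning, which the two intentionally empty dumper subclasses trigger. For reference, a usage sketch (assuming the module is importable as salt.utils.yamldumper, per the file path in this record): with the registered representer an OrderedDict serializes as a plain map in insertion order, where the stock SafeDumper would raise a RepresenterError for the unregistered type.

from collections import OrderedDict

import yaml

from salt.utils.yamldumper import SafeOrderedDumper

data = OrderedDict([('b', 1), ('a', 2)])
print(yaml.dump(data, Dumper=SafeOrderedDumper, default_flow_style=False))
# b: 1
# a: 2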
48e9a83225fa6d745f011c894d2a5e06bc1a9d14
watcher.py
watcher.py
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = mtime self._sleep(self.interval)
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = currtime self._sleep(self.interval)
Fix loaded time variable name
Fix loaded time variable name
Python
mit
rolurq/flask-gulp
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = mtime self._sleep(self.interval) Fix loaded time variable name
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = currtime self._sleep(self.interval)
<commit_before>import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = mtime self._sleep(self.interval) <commit_msg>Fix loaded time variable name<commit_after>
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = currtime self._sleep(self.interval)
import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = mtime self._sleep(self.interval) Fix loaded time variable nameimport os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = currtime self._sleep(self.interval)
<commit_before>import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = mtime self._sleep(self.interval) <commit_msg>Fix loaded time variable name<commit_after>import os from threading import Thread from werkzeug._reloader import ReloaderLoop class Watcher(Thread, ReloaderLoop): def __init__(self, paths, static, tasks, interval=1, *args, **kwargs): self.paths = paths self.static = static self.tasks = tasks super(Watcher, self).__init__(*args, **kwargs) ReloaderLoop.__init__(self, interval=interval) def run(self): times = {} while not self._Thread__stopped: for filename in self.paths: try: currtime = os.stat(filename).st_mtime except OSError: continue oldtime = times.get(filename) if oldtime and currtime > oldtime: self.static.run(*self.tasks) times[filename] = currtime self._sleep(self.interval)
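The bug fixed here was a NameError: mtime was never defined, so the loop raised on its very first pass instead of recording timestamps. The same polling pattern, reduced to a framework-free sketch (the rebuild callback is hypothetical):

import os
import time


def watch(paths, rebuild, interval=1):
    """Poll file mtimes and call rebuild(filename) on changes."""
    times = {}
    while True:
        for filename in paths:
            try:
                currtime = os.stat(filename).st_mtime
            except OSError:
                continue  # file vanished or is unreadable; retry next pass
            oldtime = times.get(filename)
            if oldtime and currtime > oldtime:
                rebuild(filename)
            times[filename] = currtime  # same name as computed above -- the fix
        time.sleep(interval)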
3b4b108e6dc2dce46442d5f09fbfe59a61baa02a
api/admin/author.py
api/admin/author.py
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled','friends','following','requests','pending'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
Revert "Making more fields viewable"
Revert "Making more fields viewable"
Python
apache-2.0
CMPUT404/socialdistribution,CMPUT404/socialdistribution,CMPUT404/socialdistribution
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled','friends','following','requests','pending'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False Revert "Making more fields viewable"
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
<commit_before>from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled','friends','following','requests','pending'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False <commit_msg>Revert "Making more fields viewable"<commit_after>
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled','friends','following','requests','pending'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False Revert "Making more fields viewable"from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
<commit_before>from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled','friends','following','requests','pending'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False <commit_msg>Revert "Making more fields viewable"<commit_after>from django.contrib.admin import ModelAdmin, BooleanFieldListFilter class AuthorOptions(ModelAdmin): list_display = ['id', 'user', 'github_username', 'host', 'bio', 'enabled'] list_editable = ['user', 'github_username', 'host', 'bio', 'enabled'] list_filter = ( ('enabled', BooleanFieldListFilter), ) def approve_author(self, request, queryset): try: queryset.update(enabled=True) self.message_user(request, "Account(s) successfully enabled") except: self.message_user(request, "Failed to enable account(s)") approve_author.short_description = "enable account(s)" def disable_author(self, request, queryset): try: queryset.update(enabled=False) self.message_user(request, "Account(s) successfully disabled") except: self.message_user(request, "Failed to disable account(s)") disable_author.short_description = "disable account(s)" actions = [approve_author, disable_author] class CachedAuthorOptions(ModelAdmin): list_display = ['id', 'displayname', 'host', 'url'] list_editable = ['displayname', 'host'] # Deletion should occur only through Author models and friend/followers def has_delete_permission(self, request, obj=None): return False
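For context, ModelAdmin options classes like these only take effect once registered against their models; a typical registration (the api.models import path is an assumption, not shown in the record) looks like:

from django.contrib import admin

from api.models import Author, CachedAuthor  # assumed location of the models

admin.site.register(Author, AuthorOptions)
admin.site.register(CachedAuthor, CachedAuthorOptions)

The custom approve/disable actions then appear in the admin changelist's action dropdown alongside Django's defaults.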
19c3375fe1da5e1a7335cd79374bac6fdf7befe6
pande_gas/utils/parallel_utils.py
pande_gas/utils/parallel_utils.py
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--ip=*', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
Remove --ip=* from ipcontroller command
Remove --ip=* from ipcontroller command
Python
bsd-3-clause
rbharath/pande-gas,rbharath/pande-gas
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--ip=*', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close() Remove --ip=* from ipcontroller command
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
<commit_before>""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--ip=*', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close() <commit_msg>Remove --ip=* from ipcontroller command<commit_after>
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--ip=*', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close() Remove --ip=* from ipcontroller command""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
<commit_before>""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--ip=*', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close() <commit_msg>Remove --ip=* from ipcontroller command<commit_after>""" IPython.parallel utilities. """ import os import subprocess import time import uuid class LocalCluster(object): """ Start an IPython.parallel cluster on localhost. Parameters ---------- n_engines : int Number of engines to initialize. """ def __init__(self, n_engines): self.n_engines = n_engines # placeholders self.cluster_id = None self.controller = None self.engines = [] self.output = None # initialize the cluster self.start() def __del__(self): """ Shut down the cluster. """ self.stop() def start(self): """ Start the cluster by running ipcontroller and ipengine. """ self.cluster_id = uuid.uuid4() output = open(os.devnull) self.controller = subprocess.Popen( ['ipcontroller', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) time.sleep(1) # wait for controller to initialize for i in xrange(self.n_engines): engine = subprocess.Popen( ['ipengine', '--cluster-id={}'.format(self.cluster_id), '--log-level=ERROR']) self.engines.append(engine) self.output = output time.sleep(10) # wait for engines to initialize def stop(self): """ Shut down the cluster. """ for engine in self.engines: engine.terminate() self.controller.terminate() self.output.close()
61a277bc61d0f646bd8d1285b3aa2025f6593953
app/applications.py
app/applications.py
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}'))
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count # 2. Stack application def base_converter(decimal_num, base): """Convert a decimal number to base 2 or 8 or 16.""" digits = '0123456789ABCDEF' remainder_stack = data_structures.Stack() conversion_result = '' while decimal_num > 0: remainder_stack.push(decimal_num % base) decimal_num = decimal_num // base while not remainder_stack.is_empty(): conversion_result += digits[remainder_stack.pop()] return conversion_result if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) [print(base_converter(233, base)) for base in [2, 8, 16]]
Apply stack in implementation of conversion from decimal to bin, oct & hex.
Apply stack in implementation of conversion from decimal to bin, oct & hex.
Python
mit
andela-kerinoso/data_structures_algo
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) Apply stack in implementation of conversion from decimal to bin, oct & hex.
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count # 2. Stack application def base_converter(decimal_num, base): """Convert a decimal number to base 2 or 8 or 16.""" digits = '0123456789ABCDEF' remainder_stack = data_structures.Stack() conversion_result = '' while decimal_num > 0: remainder_stack.push(decimal_num % base) decimal_num = decimal_num // base while not remainder_stack.is_empty(): conversion_result += digits[remainder_stack.pop()] return conversion_result if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) [print(base_converter(233, base)) for base in [2, 8, 16]]
<commit_before>from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) <commit_msg>Apply stack in implementation of conversion from decimal to bin, oct & hex.<commit_after>
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count # 2. Stack application def base_converter(decimal_num, base): """Convert a decimal number to base 2 or 8 or 16.""" digits = '0123456789ABCDEF' remainder_stack = data_structures.Stack() conversion_result = '' while decimal_num > 0: remainder_stack.push(decimal_num % base) decimal_num = decimal_num // base while not remainder_stack.is_empty(): conversion_result += digits[remainder_stack.pop()] return conversion_result if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) [print(base_converter(233, base)) for base in [2, 8, 16]]
from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) Apply stack in implementation of conversion from decimal to bin, oct & hex.from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count # 2. Stack application def base_converter(decimal_num, base): """Convert a decimal number to base 2 or 8 or 16.""" digits = '0123456789ABCDEF' remainder_stack = data_structures.Stack() conversion_result = '' while decimal_num > 0: remainder_stack.push(decimal_num % base) decimal_num = decimal_num // base while not remainder_stack.is_empty(): conversion_result += digits[remainder_stack.pop()] return conversion_result if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) [print(base_converter(233, base)) for base in [2, 8, 16]]
<commit_before>from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) <commit_msg>Apply stack in implementation of conversion from decimal to bin, oct & hex.<commit_after>from . import data_structures # 1. Stack application def balanced_parentheses_checker(symbol_string): """Verify that a set of parentheses is balanced.""" opening_symbols = '{[(' closing_symbols = '}])' opening_symbols_stack = data_structures.Stack() symbol_count = len(symbol_string) counter = 0 while counter < symbol_count: current_symbol = symbol_string[counter] if current_symbol in '{[(': opening_symbols_stack.push(current_symbol) else: if not opening_symbols_stack.is_empty() and \ opening_symbols.index(opening_symbols_stack.peek()) == \ closing_symbols.index(current_symbol): opening_symbols_stack.pop() else: counter = symbol_count counter += 1 return opening_symbols_stack.is_empty() and counter == symbol_count # 2. Stack application def base_converter(decimal_num, base): """Convert a decimal number to base 2 or 8 or 16.""" digits = '0123456789ABCDEF' remainder_stack = data_structures.Stack() conversion_result = '' while decimal_num > 0: remainder_stack.push(decimal_num % base) decimal_num = decimal_num // base while not remainder_stack.is_empty(): conversion_result += digits[remainder_stack.pop()] return conversion_result if __name__ == '__main__': print(balanced_parentheses_checker('[]{[]{([][])}()}')) [print(base_converter(233, base)) for base in [2, 8, 16]]
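As a quick check of the new converter -- with a minimal stand-in for data_structures.Stack, since that module is not shown in the record -- the divide-by-base remainders come off the stack in reverse, most significant digit first:

class Stack(object):
    # Minimal stand-in for data_structures.Stack.
    def __init__(self):
        self._items = []

    def push(self, item):
        self._items.append(item)

    def pop(self):
        return self._items.pop()

    def is_empty(self):
        return not self._items


def base_converter(decimal_num, base):
    digits = '0123456789ABCDEF'
    remainder_stack = Stack()
    conversion_result = ''
    while decimal_num > 0:
        remainder_stack.push(decimal_num % base)
        decimal_num = decimal_num // base
    while not remainder_stack.is_empty():
        conversion_result += digits[remainder_stack.pop()]
    return conversion_result


print(base_converter(233, 2))   # 11101001
print(base_converter(233, 8))   # 351
print(base_converter(233, 16))  # E9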
8bca1bd18697821cbf5269e93f6569a02470c040
attributes.py
attributes.py
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^ *:anon ([^ ]+): ([^ ]+)$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
Fix regex to optionally match substitution type and better whitespace support
Fix regex to optionally match substitution type and better whitespace support
Python
apache-2.0
rcbau/fuzzy-happiness
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^ *:anon ([^ ]+): ([^ ]+)$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration() Fix regex to optionally match substitution type and better whitespace support
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
<commit_before>#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^ *:anon ([^ ]+): ([^ ]+)$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration() <commit_msg>Fix regex to optionally match substitution type and better whitespace support<commit_after>
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^ *:anon ([^ ]+): ([^ ]+)$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration() Fix regex to optionally match substitution type and better whitespace support#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
<commit_before>#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^ *:anon ([^ ]+): ([^ ]+)$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration() <commit_msg>Fix regex to optionally match substitution type and better whitespace support<commit_after>#!/usr/bin/python # Read doc comments and work out what fields to anonymize import inspect import re from nova.db.sqlalchemy import models ANON_CONFIG_RE = re.compile('^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$') def load_configuration(): configs = {} for name, obj in inspect.getmembers(models): if not inspect.isclass(obj): continue if not issubclass(obj, models.NovaBase): continue if not obj.__doc__: continue attributes = [] for line in obj.__doc__.split('\n'): m = ANON_CONFIG_RE.match(line) if m: attributes.append((m.group(1), m.group(2))) if attributes: configs[name] = attributes return configs if __name__ == '__main__': print load_configuration()
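A couple of checks show what the tightened pattern accepts. One subtlety worth noting: because a mandatory \s+ still precedes the optional third group, a two-token line only matches when it carries trailing whitespace:

import re

ANON_CONFIG_RE = re.compile(r'^\s*:anon\s+(\S+):\s+(\S+)\s+(\S+)?\s*$')

print(ANON_CONFIG_RE.match('    :anon hostname: random_hostname  ').groups())
# ('hostname', 'random_hostname', None)
print(ANON_CONFIG_RE.match('  :anon display_name: name first_last').groups())
# ('display_name', 'name', 'first_last')
print(ANON_CONFIG_RE.match(':anon hostname: random_hostname'))
# None -- no trailing whitespace and no third token, so \s+ has nothing to eat

Also note that load_configuration() still appends only groups 1 and 2, so the optional third capture is matched but currently discarded.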