commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7ff1f860f9cff5dcec86588485b9f2ce992fdd7c | widget.py | widget.py | #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
pass
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return ' {} '.format(self.name)
| #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
def set_action(self, string)
return '%{{A{1}:{2}:}}{0}%{{A}}'.format(string, button, action)
def set_bg(self, string):
return '%{{B{1}}}{0}%{{B{1}}}'.format(string, self.bg)
def set_fg(self, string):
return '%{{F{1}}}{0}%{{F{1}}}'.format(string, self.fg)
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return self.set_fg(' {} '.format(self.name))
if __name__ == '__main__':
a = Widget('a')
print(a.get_output())
| Add bg and fg function. | Add bg and fg function.
| Python | mit | alberand/lemonbar,alberand/lemonbar,alberand/lemonbar | #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
pass
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return ' {} '.format(self.name)
Add bg and fg function. | #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
def set_action(self, string)
return '%{{A{1}:{2}:}}{0}%{{A}}'.format(string, button, action)
def set_bg(self, string):
return '%{{B{1}}}{0}%{{B{1}}}'.format(string, self.bg)
def set_fg(self, string):
return '%{{F{1}}}{0}%{{F{1}}}'.format(string, self.fg)
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return self.set_fg(' {} '.format(self.name))
if __name__ == '__main__':
a = Widget('a')
print(a.get_output())
| <commit_before>#!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
pass
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return ' {} '.format(self.name)
<commit_msg>Add bg and fg function.<commit_after> | #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
def set_action(self, string)
return '%{{A{1}:{2}:}}{0}%{{A}}'.format(string, button, action)
def set_bg(self, string):
return '%{{B{1}}}{0}%{{B{1}}}'.format(string, self.bg)
def set_fg(self, string):
return '%{{F{1}}}{0}%{{F{1}}}'.format(string, self.fg)
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return self.set_fg(' {} '.format(self.name))
if __name__ == '__main__':
a = Widget('a')
print(a.get_output())
| #!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
pass
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return ' {} '.format(self.name)
Add bg and fg function.#!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
def set_action(self, string)
return '%{{A{1}:{2}:}}{0}%{{A}}'.format(string, button, action)
def set_bg(self, string):
return '%{{B{1}}}{0}%{{B{1}}}'.format(string, self.bg)
def set_fg(self, string):
return '%{{F{1}}}{0}%{{F{1}}}'.format(string, self.fg)
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return self.set_fg(' {} '.format(self.name))
if __name__ == '__main__':
a = Widget('a')
print(a.get_output())
| <commit_before>#!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
pass
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return ' {} '.format(self.name)
<commit_msg>Add bg and fg function.<commit_after>#!/usr/bin/env python3
from config import colors, icons
# TODO properties instead of GETs/SETs
class Widget:
'''
Abstrac class for all lemonbar widgets.
'''
def __init__(self, name):
'''
Params:
bg: background color
fg: foreground color
icon: icon
icon_p: position of the icon. 0 to the left of the text, 1 to the
right of the text.
'''
# Temp
self.name = name
self.bg = colors['c_background']
self.fg = colors['c_white']
self.icon = icons['laptop']
self.icon_p = 0
self.colors_rules = dict()
def update(self):
'''
Update widget status.
'''
pass
def action(self):
'''
Implement if widget should execute any aciont, commands, programs...
'''
def set_action(self, string)
return '%{{A{1}:{2}:}}{0}%{{A}}'.format(string, button, action)
def set_bg(self, string):
return '%{{B{1}}}{0}%{{B{1}}}'.format(string, self.bg)
def set_fg(self, string):
return '%{{F{1}}}{0}%{{F{1}}}'.format(string, self.fg)
def get_output(self):
'''
Returns generated string for lemonbar.
'''
return self.set_fg(' {} '.format(self.name))
if __name__ == '__main__':
a = Widget('a')
print(a.get_output())
|
89b23ce8abd259ace055c35b0da47428bdcbc37a | scripts/server/client_example.py | scripts/server/client_example.py | #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
| #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
| Fix decoding error with python2 | Fix decoding error with python2
| Python | mit | marian-nmt/marian-train,emjotde/amunn,amunmt/marian,emjotde/amunmt,emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,marian-nmt/marian-train,marian-nmt/marian-train,amunmt/marian,amunmt/marian,emjotde/amunn,emjotde/amunn,marian-nmt/marian-train,emjotde/amunmt,emjotde/amunn,emjotde/Marian,emjotde/Marian | #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
Fix decoding error with python2 | #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
| <commit_before>#!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
<commit_msg>Fix decoding error with python2<commit_after> | #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
| #!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
Fix decoding error with python2#!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
| <commit_before>#!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
<commit_msg>Fix decoding error with python2<commit_after>#!/usr/bin/env python
from __future__ import print_function, unicode_literals, division
import sys
import time
import argparse
from websocket import create_connection
def translate(batch, port=8080):
ws = create_connection("ws://localhost:{}/translate".format(port))
#print(batch.rstrip())
ws.send(batch)
result = ws.recv()
print(result.rstrip())
ws.close()
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-b", "--batch-size", type=int, default=1)
parser.add_argument("-p", "--port", type=int, default=8080)
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
count = 0
batch = ""
for line in sys.stdin:
count += 1
batch += line.decode('utf-8') if sys.version_info < (3, 0) else line
if count == args.batch_size:
translate(batch, port=args.port)
count = 0
batch = ""
if count:
translate(batch, port=args.port)
|
e0e222420242deba4e5ec8b9ddb931ba06728b23 | apps/podcast-transcribe-episode/tests/python/random_gcs_prefix.py | apps/podcast-transcribe-episode/tests/python/random_gcs_prefix.py | import abc
import datetime
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
| import abc
import datetime
from mediawords.util.text import random_string
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}-{random_string(length=32)}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
| Make random string a bit more random | Make random string a bit more random
| Python | agpl-3.0 | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | import abc
import datetime
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
Make random string a bit more random | import abc
import datetime
from mediawords.util.text import random_string
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}-{random_string(length=32)}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
| <commit_before>import abc
import datetime
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
<commit_msg>Make random string a bit more random<commit_after> | import abc
import datetime
from mediawords.util.text import random_string
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}-{random_string(length=32)}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
| import abc
import datetime
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
Make random string a bit more randomimport abc
import datetime
from mediawords.util.text import random_string
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}-{random_string(length=32)}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
| <commit_before>import abc
import datetime
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
<commit_msg>Make random string a bit more random<commit_after>import abc
import datetime
from mediawords.util.text import random_string
from podcast_transcribe_episode.config import (
AbstractGCBucketConfig,
RawEnclosuresBucketConfig,
TranscodedEpisodesBucketConfig,
TranscriptsBucketConfig,
)
class RandomGCSPrefixMixin(AbstractGCBucketConfig, metaclass=abc.ABCMeta):
"""
Generates a random path prefix to store the objects at.
Makes it easier to debug what gets written to GCS and get rid of said objects afterwards.
"""
__slots__ = [
'__random_prefix',
]
def __init__(self):
super().__init__()
date = datetime.datetime.utcnow().isoformat()
date = date.replace(':', '_')
self.__random_prefix = f'tests-{date}-{random_string(length=32)}'
def path_prefix(self) -> str:
return self.__random_prefix
class RandomPrefixRawEnclosuresBucketConfig(RandomGCSPrefixMixin, RawEnclosuresBucketConfig):
pass
class RandomPrefixTranscodedEpisodesBucketConfig(RandomGCSPrefixMixin, TranscodedEpisodesBucketConfig):
pass
class RandomPrefixTranscriptsBucketConfig(RandomGCSPrefixMixin, TranscriptsBucketConfig):
pass
|
929909513e71282de388cf4e93476ba614e6c0c5 | Malcom/feeds/malwaredomains.py | Malcom/feeds/malwaredomains.py | import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', splitted_mdl[3]]
evil['reference'] = splitted_mdl[4]
return hostname, evil
| import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', re.sub(r'[^\w]', '', splitted_mdl[3])]
evil['reference'] = splitted_mdl[4]
return hostname, evil
| Deal with MalwareDomains non-ASCII characters | Deal with MalwareDomains non-ASCII characters
| Python | apache-2.0 | yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti | import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', splitted_mdl[3]]
evil['reference'] = splitted_mdl[4]
return hostname, evil
Deal with MalwareDomains non-ASCII characters | import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', re.sub(r'[^\w]', '', splitted_mdl[3])]
evil['reference'] = splitted_mdl[4]
return hostname, evil
| <commit_before>import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', splitted_mdl[3]]
evil['reference'] = splitted_mdl[4]
return hostname, evil
<commit_msg>Deal with MalwareDomains non-ASCII characters<commit_after> | import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', re.sub(r'[^\w]', '', splitted_mdl[3])]
evil['reference'] = splitted_mdl[4]
return hostname, evil
| import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', splitted_mdl[3]]
evil['reference'] = splitted_mdl[4]
return hostname, evil
Deal with MalwareDomains non-ASCII charactersimport urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', re.sub(r'[^\w]', '', splitted_mdl[3])]
evil['reference'] = splitted_mdl[4]
return hostname, evil
| <commit_before>import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', splitted_mdl[3]]
evil['reference'] = splitted_mdl[4]
return hostname, evil
<commit_msg>Deal with MalwareDomains non-ASCII characters<commit_after>import urllib2
import re
from Malcom.model.datatypes import Hostname, Evil
from feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalwareDomains(Feed):
def __init__(self, name):
super(MalwareDomains, self).__init__(name)
self.source = "http://mirror1.malwaredomains.com/files/domains.txt"
self.description = "Malware domains blocklist"
self.confidence = 50
self.name = "MalwareDomains"
def update(self):
self.update_lines()
def analyze(self, line):
if line.startswith('#') or line.startswith('\n'):
return
splitted_mdl = line.split('\t')
# 20151201 agasi-story.info malicious blog.dynamoo.com 20131130 20121201 20120521 20110217
# Create the new hostname and store it in the DB
hostname = Hostname(hostname=splitted_mdl[2])
if hostname['value'] == None: return # hostname not found
evil = Evil()
evil['value'] = "Malware domain blocklist (%s)" % hostname['value']
evil['tags'] = ['malwaredomains', re.sub(r'[^\w]', '', splitted_mdl[3])]
evil['reference'] = splitted_mdl[4]
return hostname, evil
|
2804ac090444e20f1a4899234a49cae8c3142003 | simuvex/procedures/libc___so___6/__init__.py | simuvex/procedures/libc___so___6/__init__.py |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'X64': {
'fd': 0x70,
},
} |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'AMD64': {
'fd': 0x70,
},
} | Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo | Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo
| Python | bsd-2-clause | tyb0807/angr,angr/angr,axt/angr,f-prettyland/angr,f-prettyland/angr,tyb0807/angr,iamahuman/angr,axt/angr,chubbymaggie/simuvex,axt/angr,iamahuman/angr,chubbymaggie/angr,chubbymaggie/angr,angr/angr,f-prettyland/angr,schieb/angr,angr/angr,schieb/angr,tyb0807/angr,chubbymaggie/angr,chubbymaggie/simuvex,iamahuman/angr,schieb/angr,chubbymaggie/simuvex,angr/simuvex |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'X64': {
'fd': 0x70,
},
}Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'AMD64': {
'fd': 0x70,
},
} | <commit_before>
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'X64': {
'fd': 0x70,
},
}<commit_msg>Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo<commit_after> |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'AMD64': {
'fd': 0x70,
},
} |
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'X64': {
'fd': 0x70,
},
}Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'AMD64': {
'fd': 0x70,
},
} | <commit_before>
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'X64': {
'fd': 0x70,
},
}<commit_msg>Rename X64 to AMD64 in libc SimProcedures to be consistent with architecture names in archinfo<commit_after>
#
# offsets in struct _IO_FILE
#
_IO_FILE = {
'X86': {
'fd': 0x38,
},
'AMD64': {
'fd': 0x70,
},
} |
c9449516bc3bfd15873347d1233001c51939a5e6 | pipeline/utils/backend_helper.py | pipeline/utils/backend_helper.py | """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
| """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
| Fix the bad jobboard path. | Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85
| Python | apache-2.0 | ethanbao/artman,ethanbao/artman,googleapis/artman,googleapis/artman,shinfan/artman,googleapis/artman | """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85 | """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
| <commit_before>"""One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
<commit_msg>Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85<commit_after> | """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
| """One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85"""One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
| <commit_before>"""One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/99-bottles-demo',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
<commit_msg>Fix the bad jobboard path.
Change-Id: I3281babfa835d7d4b76f7f299887959fa5342e85<commit_after>"""One-line documentation for backend_helper module.
A detailed description of backend_helper.
"""
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
# Default host/port of ZooKeeper service.
ZK_HOST = '104.197.150.171:2181'
# Default jobboard configuration.
JB_CONF = {
'hosts': ZK_HOST,
'board': 'zookeeper',
'path': '/taskflow/dev',
}
# Default persistence configuration.
PERSISTENCE_CONF = {
'connection': 'zookeeper',
'hosts': ZK_HOST,
'path': '/taskflow/persistence',
}
def default_persistence_backend():
return persistence_backends.fetch(PERSISTENCE_CONF)
def default_jobboard_backend(name):
return job_backends.fetch(name,
JB_CONF,
persistence=default_persistence_backend())
|
f8e89b105a69e624ef853d102310284f5441bae5 | QuantifiedDevOpenDashboardCommand.py | QuantifiedDevOpenDashboardCommand.py | import sublime, sublime_plugin, webbrowser
QD_URL = "http://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url) | import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
| Use https dashboard url when using 'Go to dashboard' | Use https dashboard url when using 'Go to dashboard'
| Python | apache-2.0 | 1self/sublime-text-plugin,1self/sublime-text-plugin,1self/sublime-text-plugin | import sublime, sublime_plugin, webbrowser
QD_URL = "http://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)Use https dashboard url when using 'Go to dashboard' | import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
| <commit_before>import sublime, sublime_plugin, webbrowser
QD_URL = "http://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)<commit_msg>Use https dashboard url when using 'Go to dashboard'<commit_after> | import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
| import sublime, sublime_plugin, webbrowser
QD_URL = "http://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)Use https dashboard url when using 'Go to dashboard'import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
| <commit_before>import sublime, sublime_plugin, webbrowser
QD_URL = "http://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)<commit_msg>Use https dashboard url when using 'Go to dashboard'<commit_after>import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.quantifieddev.org"
class GoToQuantifiedDevDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "QuantifiedDev.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s" % locals()
webbrowser.open_new_tab(url)
|
df638a33d6f0812a22bb775fded2d1790bd1e409 | router/config/settings.py | router/config/settings.py | import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
| import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
import django
django.setup()
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
| Call `django.setup()` in router app | Call `django.setup()` in router app
This should have been there before, but somehow we managed to get away
without it :)
fixes #99
| Python | agpl-3.0 | Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen | import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
Call `django.setup()` in router app
This should have been there before, but somehow we managed to get away
without it :)
fixes #99 | import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
import django
django.setup()
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
| <commit_before>import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
<commit_msg>Call `django.setup()` in router app
This should have been there before, but somehow we managed to get away
without it :)
fixes #99<commit_after> | import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
import django
django.setup()
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
| import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
Call `django.setup()` in router app
This should have been there before, but somehow we managed to get away
without it :)
fixes #99import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
import django
django.setup()
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
| <commit_before>import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
<commit_msg>Call `django.setup()` in router app
This should have been there before, but somehow we managed to get away
without it :)
fixes #99<commit_after>import os
import sys
from salmon.server import SMTPReceiver, LMTPReceiver
sys.path.append('..')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.conf import settings
import django
django.setup()
# where to listen for incoming messages
if settings.SALMON_SERVER["type"] == "lmtp":
receiver = LMTPReceiver(socket=settings.SALMON_SERVER["path"])
elif settings.SALMON_SERVER["type"] == "smtp":
receiver = SMTPReceiver(settings.SALMON_SERVER['host'],
settings.SALMON_SERVER['port'])
|
6590f92c1423ab37570857e2c6cc726e1a7fede7 | _setup_database.py | _setup_database.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
# migrating players from json file to database
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
create_player_data(simulation=False) | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
| Introduce command line parameters for database setup script | Introduce command line parameters for database setup script
| Python | mit | leaffan/pynhldb | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
# migrating players from json file to database
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
create_player_data(simulation=False)Introduce command line parameters for database setup script | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
# migrating players from json file to database
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
create_player_data(simulation=False)<commit_msg>Introduce command line parameters for database setup script<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
# migrating players from json file to database
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
create_player_data(simulation=False)Introduce command line parameters for database setup script#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
# migrating teams from json file to database
migrate_teams(simulation=True)
# creating divisions from division configuration file
create_divisions(simulation=True)
# migrating players from json file to database
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
create_player_data(simulation=False)<commit_msg>Introduce command line parameters for database setup script<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
|
4a6ccb58bade2cefc7baa9424f1747275adaa166 | antxetamedia/archive/filtersets.py | antxetamedia/archive/filtersets.py | from django_filters import FilterSet
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
class Meta:
model = NewsPodcast
fields = {
'show': ['exact'],
'categories': ['exact'],
'pub_date': ['gte', 'lte'],
}
class RadioPodcastFilterSet(FilterSet):
class Meta:
model = RadioPodcast
fields = {
'show': ['exact'],
'show__category': ['exact'],
'show__producer': ['exact'],
'pub_date': ['gte', 'lte'],
}
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
| from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet, DateTimeFilter
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = NewsPodcast
fields = ['show', 'categories', 'pub_date_after', 'pub_date_before']
class RadioPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = RadioPodcast
fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before']
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
| Add labels to the pub_date__lte pub_date__gte filters | Add labels to the pub_date__lte pub_date__gte filters
| Python | agpl-3.0 | GISAElkartea/amv2,GISAElkartea/amv2,GISAElkartea/amv2 | from django_filters import FilterSet
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
class Meta:
model = NewsPodcast
fields = {
'show': ['exact'],
'categories': ['exact'],
'pub_date': ['gte', 'lte'],
}
class RadioPodcastFilterSet(FilterSet):
class Meta:
model = RadioPodcast
fields = {
'show': ['exact'],
'show__category': ['exact'],
'show__producer': ['exact'],
'pub_date': ['gte', 'lte'],
}
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
Add labels to the pub_date__lte pub_date__gte filters | from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet, DateTimeFilter
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = NewsPodcast
fields = ['show', 'categories', 'pub_date_after', 'pub_date_before']
class RadioPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = RadioPodcast
fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before']
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
| <commit_before>from django_filters import FilterSet
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
class Meta:
model = NewsPodcast
fields = {
'show': ['exact'],
'categories': ['exact'],
'pub_date': ['gte', 'lte'],
}
class RadioPodcastFilterSet(FilterSet):
class Meta:
model = RadioPodcast
fields = {
'show': ['exact'],
'show__category': ['exact'],
'show__producer': ['exact'],
'pub_date': ['gte', 'lte'],
}
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
<commit_msg>Add labels to the pub_date__lte pub_date__gte filters<commit_after> | from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet, DateTimeFilter
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = NewsPodcast
fields = ['show', 'categories', 'pub_date_after', 'pub_date_before']
class RadioPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = RadioPodcast
fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before']
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
| from django_filters import FilterSet
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
class Meta:
model = NewsPodcast
fields = {
'show': ['exact'],
'categories': ['exact'],
'pub_date': ['gte', 'lte'],
}
class RadioPodcastFilterSet(FilterSet):
class Meta:
model = RadioPodcast
fields = {
'show': ['exact'],
'show__category': ['exact'],
'show__producer': ['exact'],
'pub_date': ['gte', 'lte'],
}
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
Add labels to the pub_date__lte pub_date__gte filtersfrom django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet, DateTimeFilter
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = NewsPodcast
fields = ['show', 'categories', 'pub_date_after', 'pub_date_before']
class RadioPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = RadioPodcast
fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before']
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
| <commit_before>from django_filters import FilterSet
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
class Meta:
model = NewsPodcast
fields = {
'show': ['exact'],
'categories': ['exact'],
'pub_date': ['gte', 'lte'],
}
class RadioPodcastFilterSet(FilterSet):
class Meta:
model = RadioPodcast
fields = {
'show': ['exact'],
'show__category': ['exact'],
'show__producer': ['exact'],
'pub_date': ['gte', 'lte'],
}
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
<commit_msg>Add labels to the pub_date__lte pub_date__gte filters<commit_after>from django.utils.translation import ugettext_lazy as _
from django_filters import FilterSet, DateTimeFilter
from antxetamedia.news.models import NewsPodcast
from antxetamedia.radio.models import RadioPodcast
from antxetamedia.projects.models import ProjectShow
# We do not want to accidentally discard anything, so be inclusive and always
# make gte and lte lookups instead of using gt or lt ones
class NewsPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = NewsPodcast
fields = ['show', 'categories', 'pub_date_after', 'pub_date_before']
class RadioPodcastFilterSet(FilterSet):
pub_date_after = DateTimeFilter('pub_date', lookup_type='gte', label=_('Published after'))
pub_date_before = DateTimeFilter('pub_date', lookup_type='lte', label=_('Published before'))
class Meta:
model = RadioPodcast
fields = ['show', 'show__category', 'show__producer', 'pub_date_after', 'pub_date_before']
class ProjectShowFilterSet(FilterSet):
class Meta:
model = ProjectShow
fields = {
'producer': ['exact'],
'creation_date': ['year__exact'],
}
|
4375e1d72832f9672eaba87019be9b769eb69e78 | alg_hash_string.py | alg_hash_string.py | from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
| Add importing absolute_import & division from Prague | Add importing absolute_import & division from Prague
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
Add importing absolute_import & division from Prague | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
| <commit_before>from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
<commit_msg>Add importing absolute_import & division from Prague<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
| from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
Add importing absolute_import & division from Praguefrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
| <commit_before>from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
<commit_msg>Add importing absolute_import & division from Prague<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def hash_str(a_str, table_size):
"""Hash a string by the folding method.
- Get ordinal number for each char.
- Sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for c in a_str:
sum += ord(c)
return sum % table_size
def weighted_hash_str(a_str, table_size):
"""Weighted-Hash a string by the folding method.
- Get ordinal number for each char.
- Weighted-sum all of the ordinal numbers.
- Return the remainder of the sum with table_size.
"""
sum = 0
for i, c in enumerate(a_str):
sum += (i + 1) * ord(c)
return sum % table_size
def main():
a_str = 'cat'
print('For hash_str(): {}'.format(hash_str(a_str, 11)))
print('For weighted_hash_str(): {}'
.format(weighted_hash_str(a_str, 11)))
if __name__ == '__main__':
main()
|
9a49ce93428d6e7bdfeebbed906a1868dd844169 | anycluster/urls.py | anycluster/urls.py | from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
| from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
| Update url format to support Django 1.10 | Update url format to support Django 1.10 | Python | mit | biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster,biodiv/anycluster | from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
Update url format to support Django 1.10 | from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
| <commit_before>from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
<commit_msg>Update url format to support Django 1.10<commit_after> | from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
| from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
Update url format to support Django 1.10from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
| <commit_before>from django.conf.urls import patterns, url
from anycluster import views
from django.conf import settings
urlpatterns = patterns('',
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
)
<commit_msg>Update url format to support Django 1.10<commit_after>from django.conf.urls import url
from anycluster import views
from django.conf import settings
urlpatterns = [
url(r'^grid/(\d+)/(\d+)/$', views.getGrid, name='getGrid'),
url(r'^kmeans/(\d+)/(\d+)/$', views.getPins, name='getPins'),
url(r'^getClusterContent/(\d+)/(\d+)/$', views.getClusterContent, name='getClusterContent'),
url(r'^getAreaContent/(\d+)/(\d+)/$', views.getAreaContent, name='getAreaContent'),
]
|
c5d4c0cbfced859407c5569d879cfb7b9815eb57 | alerts/lib/alert_plugin_set.py | alerts/lib/alert_plugin_set.py | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
| import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
| Convert debug message into unicode string | Convert debug message into unicode string
| Python | mpl-2.0 | Phrozyn/MozDef,mozilla/MozDef,gdestuynder/MozDef,mozilla/MozDef,Phrozyn/MozDef,mozilla/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,mozilla/MozDef | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
Convert debug message into unicode string | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
| <commit_before>import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
<commit_msg>Convert debug message into unicode string<commit_after> | import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
| import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
Convert debug message into unicode stringimport os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
| <commit_before>import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = '{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
<commit_msg>Convert debug message into unicode string<commit_after>import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), "../../lib"))
from plugin_set import PluginSet
from utilities.logger import logger
class AlertPluginSet(PluginSet):
def send_message_to_plugin(self, plugin_class, message, metadata=None):
if 'utctimestamp' in message and 'summary' in message:
message_log_str = u'{0} received message: ({1}) {2}'.format(plugin_class.__module__, message['utctimestamp'], message['summary'])
logger.info(message_log_str)
return plugin_class.onMessage(message), metadata
|
aa8611e43d31e07b9105cca13e4cb9c80479679b | tailor/listeners/mainlistener.py | tailor/listeners/mainlistener.py | from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
| from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Struct names should be in UpperCamelCase')
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
| Implement UpperCamelCase name check for structs | Implement UpperCamelCase name check for structs
| Python | mit | sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor,sleekbyte/tailor | from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
Implement UpperCamelCase name check for structs | from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Struct names should be in UpperCamelCase')
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
| <commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
<commit_msg>Implement UpperCamelCase name check for structs<commit_after> | from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Struct names should be in UpperCamelCase')
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
| from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
Implement UpperCamelCase name check for structsfrom tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Struct names should be in UpperCamelCase')
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
| <commit_before>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
pass
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
<commit_msg>Implement UpperCamelCase name check for structs<commit_after>from tailor.swift.swiftlistener import SwiftListener
from tailor.utils.charformat import isUpperCamelCase
class MainListener(SwiftListener):
def enterClassName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Class names should be in UpperCamelCase')
def enterEnumName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum names should be in UpperCamelCase')
def enterEnumCaseName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Enum case names should be in UpperCamelCase')
def enterStructName(self, ctx):
self.__verify_upper_camel_case(ctx, 'Struct names should be in UpperCamelCase')
@staticmethod
def __verify_upper_camel_case(ctx, err_msg):
construct_name = ctx.getText()
if not isUpperCamelCase(construct_name):
print('Line', str(ctx.start.line) + ':', err_msg)
|
7106dd7d9fb9a4df94ac6694cf52f16a5b6677e7 | apps/feeds/models.py | apps/feeds/models.py | import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.link)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
| import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.url)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
| Remove errant reference to old link field | Remove errant reference to old link field
| Python | bsd-3-clause | mozilla/mozilla-ignite,mozilla/betafarm,mozilla/mozilla-ignite,mozilla/mozilla-ignite,mozilla/betafarm,mozilla/betafarm,mozilla/betafarm,mozilla/mozilla-ignite | import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.link)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
Remove errant reference to old link field | import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.url)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
| <commit_before>import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.link)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
<commit_msg>Remove errant reference to old link field<commit_after> | import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.url)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
| import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.link)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
Remove errant reference to old link fieldimport datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.url)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
| <commit_before>import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.link)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
<commit_msg>Remove errant reference to old link field<commit_after>import datetime
from django.contrib import admin
from django.db import models
from django.db.models.signals import post_save
from activity.models import broadcast
class Entry(models.Model):
title = models.CharField(max_length=100)
published = models.DateTimeField(default=datetime.datetime.now())
url = models.URLField()
body = models.TextField()
link = models.ForeignKey('projects.Link')
class Meta:
verbose_name_plural = u'entries'
def __unicode__(self):
return u'%s -> %s' % (self.title, self.url)
@property
def project(self):
return self.link.project or None
admin.site.register(Entry)
def entry_save_handler(sender, instance, **kwargs):
broadcast(instance)
post_save.connect(entry_save_handler, sender=Entry)
|
fe0867e5499b627e776d132d300d17b40858dcab | line_profiler.py | line_profiler.py | from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
| from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
def run(self, cmd):
""" Profile a single executable statment in the main namespace.
"""
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
""" Profile a single executable statement in the given namespaces.
"""
self.enable_by_count()
try:
exec cmd in globals, locals
finally:
self.disable_by_count()
return self
def runcall(self, func, *args, **kw):
""" Profile a single function call.
"""
self.enable_by_count()
try:
return func(*args, **kw)
finally:
self.disable_by_count()
| Add the typical run/runctx/runcall methods. | ENH: Add the typical run/runctx/runcall methods.
| Python | bsd-3-clause | amegianeg/line_profiler,jstasiak/line_profiler,dreampuf/lprofiler,dreampuf/lprofiler,eblur/line_profiler,jstasiak/line_profiler,ymero/line_profiler,eblur/line_profiler,certik/line_profiler,certik/line_profiler,amegianeg/line_profiler,Doctorhoenikker/line_profiler,jsalva/line_profiler,Doctorhoenikker/line_profiler,ymero/line_profiler,jsalva/line_profiler,dreampuf/lprofiler | from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
ENH: Add the typical run/runctx/runcall methods. | from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
def run(self, cmd):
""" Profile a single executable statment in the main namespace.
"""
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
""" Profile a single executable statement in the given namespaces.
"""
self.enable_by_count()
try:
exec cmd in globals, locals
finally:
self.disable_by_count()
return self
def runcall(self, func, *args, **kw):
""" Profile a single function call.
"""
self.enable_by_count()
try:
return func(*args, **kw)
finally:
self.disable_by_count()
| <commit_before>from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
<commit_msg>ENH: Add the typical run/runctx/runcall methods.<commit_after> | from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
def run(self, cmd):
""" Profile a single executable statment in the main namespace.
"""
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
""" Profile a single executable statement in the given namespaces.
"""
self.enable_by_count()
try:
exec cmd in globals, locals
finally:
self.disable_by_count()
return self
def runcall(self, func, *args, **kw):
""" Profile a single function call.
"""
self.enable_by_count()
try:
return func(*args, **kw)
finally:
self.disable_by_count()
| from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
ENH: Add the typical run/runctx/runcall methods.from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
def run(self, cmd):
""" Profile a single executable statment in the main namespace.
"""
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
""" Profile a single executable statement in the given namespaces.
"""
self.enable_by_count()
try:
exec cmd in globals, locals
finally:
self.disable_by_count()
return self
def runcall(self, func, *args, **kw):
""" Profile a single function call.
"""
self.enable_by_count()
try:
return func(*args, **kw)
finally:
self.disable_by_count()
| <commit_before>from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
<commit_msg>ENH: Add the typical run/runctx/runcall methods.<commit_after>from cProfile import label
import marshal
from _line_profiler import LineProfiler as CLineProfiler
class LineProfiler(CLineProfiler):
""" A subclass of the C version solely to provide a decorator since Cython
does not have closures.
"""
def __call__(self, func):
""" Decorate a function to start the profiler on function entry and stop
it on function exit.
"""
def f(*args, **kwds):
self.add_function(func)
self.enable_by_count()
try:
result = func(*args, **kwds)
finally:
self.disable_by_count()
return result
f.__name__ = func.__name__
f.__doc__ = func.__doc__
f.__dict__.update(func.__dict__)
return f
def dump_stats(self, filename):
""" Dump a representation of the data to a file as a marshalled
dictionary from `get_stats()`.
"""
stats = self.get_stats()
f = open(filename, 'wb')
try:
marshal.dump(stats, f)
finally:
f.close()
def run(self, cmd):
""" Profile a single executable statment in the main namespace.
"""
import __main__
dict = __main__.__dict__
return self.runctx(cmd, dict, dict)
def runctx(self, cmd, globals, locals):
""" Profile a single executable statement in the given namespaces.
"""
self.enable_by_count()
try:
exec cmd in globals, locals
finally:
self.disable_by_count()
return self
def runcall(self, func, *args, **kw):
""" Profile a single function call.
"""
self.enable_by_count()
try:
return func(*args, **kw)
finally:
self.disable_by_count()
|
117e8c717e4555aa9ee015336c36af186c1b0a85 | src/ocspdash/web/blueprints/ui.py | src/ocspdash/web/blueprints/ui.py | # -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
"""The OCSPdash homepage UI blueprint."""
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| # -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, current_app, render_template
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| Remove unused imports from UI blueprint | Remove unused imports from UI blueprint
| Python | mit | scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash | # -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
"""The OCSPdash homepage UI blueprint."""
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
Remove unused imports from UI blueprint | # -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, current_app, render_template
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| <commit_before># -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
"""The OCSPdash homepage UI blueprint."""
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
<commit_msg>Remove unused imports from UI blueprint<commit_after> | # -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, current_app, render_template
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| # -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
"""The OCSPdash homepage UI blueprint."""
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
Remove unused imports from UI blueprint# -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, current_app, render_template
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| <commit_before># -*- coding: utf-8 -*-
# import nacl.exceptions
# import nacl.signing
from flask import Blueprint, current_app, render_template
"""The OCSPdash homepage UI blueprint."""
# from nacl.encoding import URLSafeBase64Encoder
# from nacl.signing import VerifyKey
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
<commit_msg>Remove unused imports from UI blueprint<commit_after># -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, current_app, render_template
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = current_app.manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
|
5c0937993fdf34c96ccde3226c8e2a81efb381ce | troposphere/views/allocations.py | troposphere/views/allocations.py |
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
| Fix theme asset pathing in "no allocation" | Fix theme asset pathing in "no allocation"
| Python | apache-2.0 | CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend |
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
Fix theme asset pathing in "no allocation" |
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
| <commit_before>
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
<commit_msg>Fix theme asset pathing in "no allocation"<commit_after> |
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
Fix theme asset pathing in "no allocation"
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
| <commit_before>
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
<commit_msg>Fix theme asset pathing in "no allocation"<commit_after>
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
a86ca24eba556580a68695f4e0c2a55c8f5f3df1 | s3authbasic/views.py | s3authbasic/views.py | from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type='text/html')
response.app_iter = s3file
return response
| from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type=s3file.content_type)
response.app_iter = s3file
return response
| Set the correct content type according to the amazon metadata | Set the correct content type according to the amazon metadata
| Python | mit | ant30/s3authbasic | from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type='text/html')
response.app_iter = s3file
return response
Set the correct content type according to the amazon metadata | from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type=s3file.content_type)
response.app_iter = s3file
return response
| <commit_before>from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type='text/html')
response.app_iter = s3file
return response
<commit_msg>Set the correct content type according to the amazon metadata<commit_after> | from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type=s3file.content_type)
response.app_iter = s3file
return response
| from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type='text/html')
response.app_iter = s3file
return response
Set the correct content type according to the amazon metadatafrom pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type=s3file.content_type)
response.app_iter = s3file
return response
| <commit_before>from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type='text/html')
response.app_iter = s3file
return response
<commit_msg>Set the correct content type according to the amazon metadata<commit_after>from pyramid.httpexceptions import HTTPUnauthorized, HTTPNotFound
from pyramid.security import forget
from pyramid.response import Response
from pyramid.view import view_config, forbidden_view_config
@forbidden_view_config()
def basic_challenge(request):
response = HTTPUnauthorized()
response.headers.update(forget(request))
return response
@view_config(route_name='site', permission='view')
def site(request):
s3file = request.s3.get_file(request.path)
if s3file is None:
return HTTPNotFound()
response = Response(content_type=s3file.content_type)
response.app_iter = s3file
return response
|
01eece3984534dcd124df5d753f461f276fd6b53 | ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/tests/test_plugin.py | ckanext/ckanext-apicatalog_routes/ckanext/apicatalog_routes/tests/test_plugin.py | """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class Apicatalog_Routes_Tests():
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
| """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class ApicatalogRoutesTests(object):
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
| Fix some parameters for pytest to pickup the test | Fix some parameters for pytest to pickup the test
| Python | mit | vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog | """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class Apicatalog_Routes_Tests():
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
Fix some parameters for pytest to pickup the test | """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class ApicatalogRoutesTests(object):
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
| <commit_before>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class Apicatalog_Routes_Tests():
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
<commit_msg>Fix some parameters for pytest to pickup the test<commit_after> | """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class ApicatalogRoutesTests(object):
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
| """Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class Apicatalog_Routes_Tests():
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
Fix some parameters for pytest to pickup the test"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class ApicatalogRoutesTests(object):
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
| <commit_before>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class Apicatalog_Routes_Tests():
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
<commit_msg>Fix some parameters for pytest to pickup the test<commit_after>"""Tests for plugin.py."""
import pytest
from ckan.tests import factories
import ckan.tests.helpers as helpers
from ckan.plugins.toolkit import NotAuthorized
@pytest.mark.ckan_config('ckan.plugins', 'apicatalog_routes')
@pytest.mark.usefixtures('clean_db', 'with_plugins', 'with_request_context')
class ApicatalogRoutesTests(object):
def non_sysadmins_should_not_be_able_to_delete_subsystems(self):
user = factories.User()
org_users = [{"name": user["name"], "capacity": "admin"}]
org = factories.Organization(users=org_users)
subsystem = factories.Dataset(
owner_org=org["id"]
)
context = {'ignore_auth': False, 'user': user['name']}
with pytest.raises(NotAuthorized):
helpers.call_action('package_delete', context, name=subsystem['name'])
|
4ea0cb50353b3d7cb7ee3dd4d16397db95d75223 | salt/states/rsync.py | salt/states/rsync.py | # -*- coding: utf-8 -*-
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
| # -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
| Add license and SUSE copyright | Add license and SUSE copyright
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | # -*- coding: utf-8 -*-
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
Add license and SUSE copyright | # -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
<commit_msg>Add license and SUSE copyright<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
| # -*- coding: utf-8 -*-
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
Add license and SUSE copyright# -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
<commit_msg>Add license and SUSE copyright<commit_after># -*- coding: utf-8 -*-
#
# Copyright 2015 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Operations with Rsync.
'''
import salt.utils
def __virtual__():
'''
Only if Rsync is available.
:return:
'''
return salt.utils.which('rsync') and 'rsync' or False
def synchronized(name, source, delete=False, force=False, update=False,
passwordfile=None, exclude=None, excludefrom=None):
'''
Synchronizing directories:
.. code-block:: yaml
/opt/user-backups:
rsync.synchronized:
- source: /home
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
result = __salt__['rsync.rsync'](source, name, delete=delete, force=force, update=update,
passwordfile=passwordfile, exclude=exclude, excludefrom=excludefrom)
return ret
|
1c057c8ea1e75909e90992784cff177ea1cb294b | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
| #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '607907aed2c1dcdd3b5968a756a990ba3f47bca7'
| Update libchromiumcontent for iframe sandbox. | Update libchromiumcontent for iframe sandbox.
| Python | mit | leolujuyi/electron,Zagorakiss/electron,noikiy/electron,the-ress/electron,Neron-X5/electron,JussMee15/electron,Jacobichou/electron,iftekeriba/electron,webmechanicx/electron,michaelchiche/electron,bobwol/electron,mjaniszew/electron,aliib/electron,trankmichael/electron,saronwei/electron,xiruibing/electron,jjz/electron,leethomas/electron,sshiting/electron,smczk/electron,micalan/electron,mhkeller/electron,trigrass2/electron,vHanda/electron,michaelchiche/electron,Zagorakiss/electron,shockone/electron,mattdesl/electron,subblue/electron,noikiy/electron,bobwol/electron,aliib/electron,tylergibson/electron,Neron-X5/electron,edulan/electron,noikiy/electron,Andrey-Pavlov/electron,dongjoon-hyun/electron,fomojola/electron,stevemao/electron,wolfflow/electron,tonyganch/electron,the-ress/electron,nekuz0r/electron,systembugtj/electron,anko/electron,lrlna/electron,arturts/electron,nagyistoce/electron-atom-shell,beni55/electron,jtburke/electron,nicholasess/electron,coderhaoxin/electron,ervinb/electron,rsvip/electron,thompsonemerson/electron,wan-qy/electron,mattotodd/electron,vaginessa/electron,mhkeller/electron,Gerhut/electron,JesselJohn/electron,biblerule/UMCTelnetHub,lrlna/electron,bobwol/electron,jonatasfreitasv/electron,Andrey-Pavlov/electron,meowlab/electron,posix4e/electron,bpasero/electron,webmechanicx/electron,chriskdon/electron,vaginessa/electron,bbondy/electron,subblue/electron,wan-qy/electron,synaptek/electron,abhishekgahlot/electron,jlhbaseball15/electron,fabien-d/electron,SufianHassan/electron,vHanda/electron,preco21/electron,carsonmcdonald/electron,Gerhut/electron,evgenyzinoviev/electron,jaanus/electron,stevekinney/electron,GoooIce/electron,leolujuyi/electron,joneit/electron,RobertJGabriel/electron,abhishekgahlot/electron,rreimann/electron,thingsinjars/electron,adcentury/electron,RIAEvangelist/electron,JesselJohn/electron,iftekeriba/electron,setzer777/electron,chriskdon/electron,matiasinsaurralde/electron,lzpfmh/electron,kokdemo/electron,IonicaBizauKitchen
/electron,chrisswk/electron,fritx/electron,systembugtj/electron,GoooIce/electron,simongregory/electron,MaxGraey/electron,thompsonemerson/electron,JussMee15/electron,renaesop/electron,deepak1556/atom-shell,lzpfmh/electron,bwiggs/electron,setzer777/electron,brave/electron,jacksondc/electron,adcentury/electron,Ivshti/electron,cqqccqc/electron,zhakui/electron,MaxWhere/electron,fritx/electron,gamedevsam/electron,leolujuyi/electron,bright-sparks/electron,rajatsingla28/electron,voidbridge/electron,twolfson/electron,carsonmcdonald/electron,IonicaBizauKitchen/electron,beni55/electron,the-ress/electron,twolfson/electron,stevemao/electron,kazupon/electron,Faiz7412/electron,trankmichael/electron,miniak/electron,bbondy/electron,pombredanne/electron,kcrt/electron,fffej/electron,aaron-goshine/electron,tylergibson/electron,bitemyapp/electron,benweissmann/electron,yan-foto/electron,xfstudio/electron,cos2004/electron,RIAEvangelist/electron,thingsinjars/electron,gabrielPeart/electron,minggo/electron,maxogden/atom-shell,DivyaKMenon/electron,tonyganch/electron,benweissmann/electron,stevemao/electron,brenca/electron,gbn972/electron,hokein/atom-shell,nicholasess/electron,noikiy/electron,pandoraui/electron,jonatasfreitasv/electron,Evercoder/electron,eriser/electron,shiftkey/electron,fffej/electron,chrisswk/electron,joaomoreno/atom-shell,ankitaggarwal011/electron,shaundunne/electron,LadyNaggaga/electron,John-Lin/electron,christian-bromann/electron,brave/muon,bright-sparks/electron,lrlna/electron,gabriel/electron,michaelchiche/electron,rprichard/electron,etiktin/electron,bruce/electron,jsutcodes/electron,Andrey-Pavlov/electron,kikong/electron,tomashanacek/electron,stevekinney/electron,baiwyc119/electron,synaptek/electron,aichingm/electron,cos2004/electron,aichingm/electron,soulteary/electron,Floato/electron,chrisswk/electron,gbn972/electron,jonatasfreitasv/electron,shennushi/electron,farmisen/electron,kostia/electron,carsonmcdonald/electron,systembugtj/electron,ianscrivener/electron,trigrass
2/electron,adamjgray/electron,arusakov/electron,benweissmann/electron,fireball-x/atom-shell,thompsonemerson/electron,destan/electron,vaginessa/electron,jcblw/electron,dahal/electron,egoist/electron,jcblw/electron,MaxGraey/electron,etiktin/electron,meowlab/electron,Ivshti/electron,minggo/electron,deepak1556/atom-shell,kenmozi/electron,brenca/electron,systembugtj/electron,Neron-X5/electron,MaxGraey/electron,brave/muon,joaomoreno/atom-shell,bruce/electron,Gerhut/electron,renaesop/electron,kikong/electron,gamedevsam/electron,shaundunne/electron,fomojola/electron,nekuz0r/electron,medixdev/electron,iftekeriba/electron,Jonekee/electron,zhakui/electron,wolfflow/electron,leftstick/electron,evgenyzinoviev/electron,kokdemo/electron,jlord/electron,kazupon/electron,mrwizard82d1/electron,ankitaggarwal011/electron,kostia/electron,greyhwndz/electron,saronwei/electron,soulteary/electron,seanchas116/electron,kenmozi/electron,bitemyapp/electron,rsvip/electron,Evercoder/electron,nagyistoce/electron-atom-shell,Evercoder/electron,gabriel/electron,bwiggs/electron,Jacobichou/electron,aecca/electron,jlhbaseball15/electron,leftstick/electron,rhencke/electron,yalexx/electron,leethomas/electron,ankitaggarwal011/electron,renaesop/electron,Faiz7412/electron,jcblw/electron,kazupon/electron,farmisen/electron,howmuchcomputer/electron,simonfork/electron,the-ress/electron,dkfiresky/electron,tinydew4/electron,arturts/electron,yan-foto/electron,setzer777/electron,dkfiresky/electron,dkfiresky/electron,kenmozi/electron,renaesop/electron,Rokt33r/electron,thomsonreuters/electron,astoilkov/electron,sshiting/electron,eric-seekas/electron,mubassirhayat/electron,timruffles/electron,rsvip/electron,baiwyc119/electron,bbondy/electron,the-ress/electron,gbn972/electron,yan-foto/electron,subblue/electron,d-salas/electron,Neron-X5/electron,rajatsingla28/electron,aichingm/electron,tylergibson/electron,deed02392/electron,leolujuyi/electron,pombredanne/electron,LadyNaggaga/electron,setzer777/electron,ervinb/electron,Jus
sMee15/electron,yalexx/electron,Gerhut/electron,RIAEvangelist/electron,fomojola/electron,tinydew4/electron,zhakui/electron,Neron-X5/electron,mhkeller/electron,yalexx/electron,thingsinjars/electron,christian-bromann/electron,gerhardberger/electron,edulan/electron,fffej/electron,posix4e/electron,RobertJGabriel/electron,evgenyzinoviev/electron,mjaniszew/electron,mjaniszew/electron,thingsinjars/electron,bbondy/electron,pandoraui/electron,fritx/electron,saronwei/electron,bpasero/electron,chrisswk/electron,matiasinsaurralde/electron,micalan/electron,pirafrank/electron,stevekinney/electron,JussMee15/electron,howmuchcomputer/electron,ianscrivener/electron,BionicClick/electron,simongregory/electron,subblue/electron,digideskio/electron,Jonekee/electron,gstack/infinium-shell,Andrey-Pavlov/electron,ervinb/electron,mirrh/electron,sircharleswatson/electron,arusakov/electron,kcrt/electron,bitemyapp/electron,Floato/electron,cos2004/electron,howmuchcomputer/electron,thomsonreuters/electron,jsutcodes/electron,mrwizard82d1/electron,icattlecoder/electron,micalan/electron,rreimann/electron,brave/electron,egoist/electron,miniak/electron,webmechanicx/electron,astoilkov/electron,cos2004/electron,destan/electron,Faiz7412/electron,Jacobichou/electron,baiwyc119/electron,maxogden/atom-shell,mattotodd/electron,brave/muon,meowlab/electron,jlhbaseball15/electron,Evercoder/electron,mubassirhayat/electron,thompsonemerson/electron,egoist/electron,dahal/electron,mattotodd/electron,leftstick/electron,rhencke/electron,pirafrank/electron,trigrass2/electron,tincan24/electron,jjz/electron,aichingm/electron,eriser/electron,aichingm/electron,JussMee15/electron,MaxWhere/electron,micalan/electron,smczk/electron,voidbridge/electron,BionicClick/electron,lzpfmh/electron,fritx/electron,ervinb/electron,eric-seekas/electron,neutrous/electron,nekuz0r/electron,Faiz7412/electron,webmechanicx/electron,eric-seekas/electron,mirrh/electron,leftstick/electron,dongjoon-hyun/electron,cqqccqc/electron,JesselJohn/electron,edul
an/electron,deed02392/electron,gerhardberger/electron,brenca/electron,jhen0409/electron,joaomoreno/atom-shell,wolfflow/electron,iftekeriba/electron,deepak1556/atom-shell,vaginessa/electron,vHanda/electron,SufianHassan/electron,bwiggs/electron,IonicaBizauKitchen/electron,rhencke/electron,leethomas/electron,webmechanicx/electron,davazp/electron,joneit/electron,xiruibing/electron,Zagorakiss/electron,oiledCode/electron,gabrielPeart/electron,kenmozi/electron,kokdemo/electron,wan-qy/electron,bitemyapp/electron,anko/electron,evgenyzinoviev/electron,gamedevsam/electron,zhakui/electron,SufianHassan/electron,leethomas/electron,shiftkey/electron,gabrielPeart/electron,pirafrank/electron,jannishuebl/electron,hokein/atom-shell,meowlab/electron,rajatsingla28/electron,jannishuebl/electron,soulteary/electron,neutrous/electron,d-salas/electron,kcrt/electron,voidbridge/electron,dongjoon-hyun/electron,Andrey-Pavlov/electron,dongjoon-hyun/electron,Zagorakiss/electron,brave/electron,electron/electron,biblerule/UMCTelnetHub,nicholasess/electron,jtburke/electron,destan/electron,GoooIce/electron,kokdemo/electron,DivyaKMenon/electron,deepak1556/atom-shell,posix4e/electron,GoooIce/electron,neutrous/electron,Jonekee/electron,davazp/electron,smczk/electron,bobwol/electron,seanchas116/electron,cos2004/electron,xiruibing/electron,Jacobichou/electron,kostia/electron,Zagorakiss/electron,deepak1556/atom-shell,jlord/electron,felixrieseberg/electron,Ivshti/electron,mattdesl/electron,destan/electron,darwin/electron,michaelchiche/electron,renaesop/electron,MaxWhere/electron,kikong/electron,jcblw/electron,pombredanne/electron,coderhaoxin/electron,tomashanacek/electron,aecca/electron,adcentury/electron,farmisen/electron,dahal/electron,trankmichael/electron,trigrass2/electron,kazupon/electron,jiaz/electron,Neron-X5/electron,eric-seekas/electron,natgolov/electron,tomashanacek/electron,LadyNaggaga/electron,fireball-x/atom-shell,gstack/infinium-shell,ankitaggarwal011/electron,howmuchcomputer/electron,gerhardb
erger/electron,maxogden/atom-shell,simonfork/electron,sky7sea/electron,meowlab/electron,subblue/electron,mhkeller/electron,tinydew4/electron,roadev/electron,natgolov/electron,kazupon/electron,vipulroxx/electron,pombredanne/electron,SufianHassan/electron,Rokt33r/electron,brenca/electron,sky7sea/electron,lrlna/electron,synaptek/electron,egoist/electron,gerhardberger/electron,wan-qy/electron,tincan24/electron,trankmichael/electron,jacksondc/electron,bruce/electron,arusakov/electron,jtburke/electron,felixrieseberg/electron,LadyNaggaga/electron,jonatasfreitasv/electron,vaginessa/electron,jtburke/electron,dkfiresky/electron,Gerhut/electron,christian-bromann/electron,arusakov/electron,icattlecoder/electron,MaxWhere/electron,mattdesl/electron,pombredanne/electron,jcblw/electron,lrlna/electron,zhakui/electron,saronwei/electron,mirrh/electron,jhen0409/electron,trigrass2/electron,fritx/electron,brave/electron,farmisen/electron,micalan/electron,adamjgray/electron,robinvandernoord/electron,leftstick/electron,michaelchiche/electron,vipulroxx/electron,cqqccqc/electron,jhen0409/electron,deed02392/electron,ankitaggarwal011/electron,gstack/infinium-shell,posix4e/electron,shockone/electron,etiktin/electron,rprichard/electron,stevekinney/electron,seanchas116/electron,renaesop/electron,anko/electron,jjz/electron,bruce/electron,thompsonemerson/electron,abhishekgahlot/electron,rhencke/electron,aliib/electron,joaomoreno/atom-shell,medixdev/electron,beni55/electron,John-Lin/electron,biblerule/UMCTelnetHub,mattdesl/electron,kostia/electron,kcrt/electron,tonyganch/electron,aaron-goshine/electron,icattlecoder/electron,mirrh/electron,brenca/electron,rajatsingla28/electron,edulan/electron,tinydew4/electron,cqqccqc/electron,matiasinsaurralde/electron,Rokt33r/electron,eriser/electron,zhakui/electron,greyhwndz/electron,icattlecoder/electron,vHanda/electron,tincan24/electron,Andrey-Pavlov/electron,jhen0409/electron,joaomoreno/atom-shell,smczk/electron,tonyganch/electron,rsvip/electron,roadev/electro
n,roadev/electron,benweissmann/electron,shiftkey/electron,BionicClick/electron,fffej/electron,voidbridge/electron,RobertJGabriel/electron,mattotodd/electron,fireball-x/atom-shell,kokdemo/electron,evgenyzinoviev/electron,gabriel/electron,fritx/electron,jannishuebl/electron,wan-qy/electron,gamedevsam/electron,roadev/electron,oiledCode/electron,mattdesl/electron,jiaz/electron,neutrous/electron,d-salas/electron,BionicClick/electron,xfstudio/electron,RIAEvangelist/electron,rhencke/electron,matiasinsaurralde/electron,beni55/electron,medixdev/electron,trankmichael/electron,aliib/electron,matiasinsaurralde/electron,kokdemo/electron,wolfflow/electron,jhen0409/electron,edulan/electron,mattotodd/electron,simongregory/electron,pandoraui/electron,Gerhut/electron,vipulroxx/electron,coderhaoxin/electron,bpasero/electron,aaron-goshine/electron,anko/electron,posix4e/electron,pirafrank/electron,brave/electron,thingsinjars/electron,vipulroxx/electron,trigrass2/electron,darwin/electron,simonfork/electron,shockone/electron,jacksondc/electron,oiledCode/electron,kostia/electron,pandoraui/electron,xfstudio/electron,seanchas116/electron,fabien-d/electron,jannishuebl/electron,brave/muon,ianscrivener/electron,DivyaKMenon/electron,maxogden/atom-shell,tomashanacek/electron,JesselJohn/electron,voidbridge/electron,jiaz/electron,gstack/infinium-shell,shockone/electron,takashi/electron,robinvandernoord/electron,d-salas/electron,John-Lin/electron,sircharleswatson/electron,eriser/electron,christian-bromann/electron,christian-bromann/electron,natgolov/electron,mubassirhayat/electron,coderhaoxin/electron,astoilkov/electron,rhencke/electron,deed02392/electron,adcentury/electron,jlord/electron,bobwol/electron,kostia/electron,neutrous/electron,yalexx/electron,shiftkey/electron,nicholasess/electron,gabrielPeart/electron,jlhbaseball15/electron,davazp/electron,webmechanicx/electron,tonyganch/electron,bpasero/electron,greyhwndz/electron,miniak/electron,RIAEvangelist/electron,the-ress/electron,jcblw/electron,g
amedevsam/electron,electron/electron,tylergibson/electron,neutrous/electron,LadyNaggaga/electron,darwin/electron,benweissmann/electron,fireball-x/atom-shell,voidbridge/electron,setzer777/electron,brave/muon,iftekeriba/electron,sircharleswatson/electron,kcrt/electron,farmisen/electron,rprichard/electron,Floato/electron,rreimann/electron,bwiggs/electron,etiktin/electron,rreimann/electron,aaron-goshine/electron,mjaniszew/electron,aecca/electron,RIAEvangelist/electron,gerhardberger/electron,arusakov/electron,preco21/electron,jonatasfreitasv/electron,eric-seekas/electron,chriskdon/electron,chrisswk/electron,jaanus/electron,nicobot/electron,pandoraui/electron,Rokt33r/electron,faizalpribadi/electron,vipulroxx/electron,bpasero/electron,gamedevsam/electron,preco21/electron,dkfiresky/electron,leolujuyi/electron,tomashanacek/electron,lzpfmh/electron,gbn972/electron,BionicClick/electron,gstack/infinium-shell,shennushi/electron,JesselJohn/electron,farmisen/electron,takashi/electron,twolfson/electron,medixdev/electron,dahal/electron,sircharleswatson/electron,abhishekgahlot/electron,digideskio/electron,darwin/electron,sircharleswatson/electron,davazp/electron,bitemyapp/electron,IonicaBizauKitchen/electron,arusakov/electron,Floato/electron,jonatasfreitasv/electron,faizalpribadi/electron,medixdev/electron,mhkeller/electron,lzpfmh/electron,kenmozi/electron,seanchas116/electron,Jacobichou/electron,pirafrank/electron,wan-qy/electron,leethomas/electron,leolujuyi/electron,baiwyc119/electron,biblerule/UMCTelnetHub,sky7sea/electron,mrwizard82d1/electron,xiruibing/electron,mhkeller/electron,jiaz/electron,tincan24/electron,ervinb/electron,icattlecoder/electron,bobwol/electron,preco21/electron,benweissmann/electron,roadev/electron,deed02392/electron,Jonekee/electron,rajatsingla28/electron,jlord/electron,subblue/electron,fomojola/electron,bright-sparks/electron,digideskio/electron,brave/electron,twolfson/electron,bright-sparks/electron,tonyganch/electron,d-salas/electron,jannishuebl/electron,b
ruce/electron,bwiggs/electron,abhishekgahlot/electron,greyhwndz/electron,lrlna/electron,stevekinney/electron,xfstudio/electron,dkfiresky/electron,greyhwndz/electron,biblerule/UMCTelnetHub,sshiting/electron,rreimann/electron,pandoraui/electron,bruce/electron,simongregory/electron,vHanda/electron,joneit/electron,tincan24/electron,DivyaKMenon/electron,soulteary/electron,smczk/electron,dongjoon-hyun/electron,Ivshti/electron,destan/electron,kcrt/electron,carsonmcdonald/electron,natgolov/electron,Rokt33r/electron,adamjgray/electron,aaron-goshine/electron,mrwizard82d1/electron,rajatsingla28/electron,jaanus/electron,fffej/electron,nicobot/electron,jlhbaseball15/electron,thomsonreuters/electron,gerhardberger/electron,digideskio/electron,felixrieseberg/electron,shaundunne/electron,gerhardberger/electron,arturts/electron,preco21/electron,felixrieseberg/electron,maxogden/atom-shell,faizalpribadi/electron,nicholasess/electron,adamjgray/electron,gabrielPeart/electron,JussMee15/electron,dahal/electron,timruffles/electron,arturts/electron,jjz/electron,coderhaoxin/electron,mirrh/electron,thomsonreuters/electron,mubassirhayat/electron,d-salas/electron,robinvandernoord/electron,felixrieseberg/electron,MaxGraey/electron,DivyaKMenon/electron,lzpfmh/electron,rsvip/electron,GoooIce/electron,takashi/electron,Floato/electron,noikiy/electron,adamjgray/electron,sky7sea/electron,nicobot/electron,shaundunne/electron,christian-bromann/electron,dahal/electron,minggo/electron,seanchas116/electron,kazupon/electron,yan-foto/electron,joneit/electron,meowlab/electron,synaptek/electron,timruffles/electron,ianscrivener/electron,ervinb/electron,jsutcodes/electron,vaginessa/electron,trankmichael/electron,shennushi/electron,aliib/electron,RobertJGabriel/electron,nicobot/electron,shaundunne/electron,bbondy/electron,posix4e/electron,nicobot/electron,minggo/electron,shockone/electron,stevemao/electron,destan/electron,howmuchcomputer/electron,JesselJohn/electron,shennushi/electron,stevemao/electron,simonfork/e
lectron,jjz/electron,electron/electron,jlhbaseball15/electron,biblerule/UMCTelnetHub,sky7sea/electron,DivyaKMenon/electron,jacksondc/electron,fabien-d/electron,fabien-d/electron,aecca/electron,yalexx/electron,fabien-d/electron,cos2004/electron,electron/electron,xiruibing/electron,oiledCode/electron,jaanus/electron,preco21/electron,setzer777/electron,saronwei/electron,saronwei/electron,electron/electron,nicholasess/electron,davazp/electron,tylergibson/electron,robinvandernoord/electron,cqqccqc/electron,carsonmcdonald/electron,jannishuebl/electron,sshiting/electron,robinvandernoord/electron,IonicaBizauKitchen/electron,stevekinney/electron,thompsonemerson/electron,etiktin/electron,stevemao/electron,jaanus/electron,Evercoder/electron,miniak/electron,John-Lin/electron,shaundunne/electron,gbn972/electron,nekuz0r/electron,nagyistoce/electron-atom-shell,astoilkov/electron,anko/electron,sshiting/electron,soulteary/electron,nekuz0r/electron,jjz/electron,bpasero/electron,aichingm/electron,kikong/electron,John-Lin/electron,baiwyc119/electron,nicobot/electron,shiftkey/electron,jiaz/electron,vHanda/electron,sshiting/electron,nagyistoce/electron-atom-shell,bright-sparks/electron,nagyistoce/electron-atom-shell,mubassirhayat/electron,electron/electron,simonfork/electron,bbondy/electron,fomojola/electron,roadev/electron,MaxWhere/electron,Faiz7412/electron,fffej/electron,mjaniszew/electron,twolfson/electron,jtburke/electron,pombredanne/electron,joaomoreno/atom-shell,ianscrivener/electron,mirrh/electron,takashi/electron,eriser/electron,digideskio/electron,etiktin/electron,yan-foto/electron,jlord/electron,michaelchiche/electron,minggo/electron,Zagorakiss/electron,beni55/electron,rreimann/electron,RobertJGabriel/electron,tincan24/electron,brave/muon,minggo/electron,leethomas/electron,astoilkov/electron,faizalpribadi/electron,dongjoon-hyun/electron,medixdev/electron,systembugtj/electron,greyhwndz/electron,gabriel/electron,ianscrivener/electron,adcentury/electron,arturts/electron,Rokt33r/e
lectron,mrwizard82d1/electron,eriser/electron,kikong/electron,mjaniszew/electron,sircharleswatson/electron,beni55/electron,chriskdon/electron,jhen0409/electron,evgenyzinoviev/electron,gabrielPeart/electron,synaptek/electron,soulteary/electron,simongregory/electron,tomashanacek/electron,BionicClick/electron,timruffles/electron,jtburke/electron,wolfflow/electron,micalan/electron,noikiy/electron,miniak/electron,systembugtj/electron,mattotodd/electron,kenmozi/electron,darwin/electron,chriskdon/electron,John-Lin/electron,simongregory/electron,GoooIce/electron,synaptek/electron,yalexx/electron,leftstick/electron,davazp/electron,simonfork/electron,thomsonreuters/electron,timruffles/electron,jaanus/electron,adcentury/electron,arturts/electron,hokein/atom-shell,bright-sparks/electron,faizalpribadi/electron,Jonekee/electron,ankitaggarwal011/electron,sky7sea/electron,yan-foto/electron,abhishekgahlot/electron,aecca/electron,jsutcodes/electron,anko/electron,carsonmcdonald/electron,xiruibing/electron,hokein/atom-shell,electron/electron,iftekeriba/electron,Evercoder/electron,MaxGraey/electron,gabriel/electron,cqqccqc/electron,mrwizard82d1/electron,mattdesl/electron,shiftkey/electron,tinydew4/electron,Ivshti/electron,jacksondc/electron,eric-seekas/electron,LadyNaggaga/electron,joneit/electron,takashi/electron,jsutcodes/electron,aaron-goshine/electron,MaxWhere/electron,oiledCode/electron,smczk/electron,takashi/electron,oiledCode/electron,vipulroxx/electron,xfstudio/electron,rprichard/electron,twolfson/electron,aliib/electron,fireball-x/atom-shell,jacksondc/electron,shockone/electron,deed02392/electron,Jonekee/electron,egoist/electron,egoist/electron,bitemyapp/electron,gbn972/electron,aecca/electron,the-ress/electron,faizalpribadi/electron,felixrieseberg/electron,digideskio/electron,fomojola/electron,astoilkov/electron,natgolov/electron,baiwyc119/electron,adamjgray/electron,jiaz/electron,bpasero/electron,joneit/electron,hokein/atom-shell,shennushi/electron,jsutcodes/electron,howmuchc
omputer/electron,RobertJGabriel/electron,thomsonreuters/electron,IonicaBizauKitchen/electron,shennushi/electron,Floato/electron,pirafrank/electron,SufianHassan/electron,robinvandernoord/electron,icattlecoder/electron,brenca/electron,thingsinjars/electron,matiasinsaurralde/electron,tinydew4/electron,coderhaoxin/electron,SufianHassan/electron,miniak/electron,gabriel/electron,wolfflow/electron,Jacobichou/electron,chriskdon/electron,xfstudio/electron,nekuz0r/electron,tylergibson/electron,natgolov/electron,edulan/electron,bwiggs/electron | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
Update libchromiumcontent for iframe sandbox. | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '607907aed2c1dcdd3b5968a756a990ba3f47bca7'
| <commit_before>#!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
<commit_msg>Update libchromiumcontent for iframe sandbox.<commit_after> | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '607907aed2c1dcdd3b5968a756a990ba3f47bca7'
| #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
Update libchromiumcontent for iframe sandbox.#!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '607907aed2c1dcdd3b5968a756a990ba3f47bca7'
| <commit_before>#!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
<commit_msg>Update libchromiumcontent for iframe sandbox.<commit_after>#!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '607907aed2c1dcdd3b5968a756a990ba3f47bca7'
|
d874ba80db5bedb67b0b50cea431321c77b10f5d | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2cf80c1743e370c12eb7bf078eb425f3cc355383'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| Upgrade libchromiumcontent for gin headers. | Upgrade libchromiumcontent for gin headers.
| Python | mit | icattlecoder/electron,ervinb/electron,pandoraui/electron,jlord/electron,Rokt33r/electron,fireball-x/atom-shell,michaelchiche/electron,gamedevsam/electron,tonyganch/electron,noikiy/electron,nicobot/electron,leftstick/electron,bobwol/electron,maxogden/atom-shell,bobwol/electron,GoooIce/electron,soulteary/electron,jcblw/electron,icattlecoder/electron,jsutcodes/electron,sshiting/electron,voidbridge/electron,webmechanicx/electron,bruce/electron,shennushi/electron,rreimann/electron,takashi/electron,aliib/electron,vHanda/electron,mhkeller/electron,vaginessa/electron,wan-qy/electron,MaxGraey/electron,bpasero/electron,darwin/electron,subblue/electron,Faiz7412/electron,BionicClick/electron,jhen0409/electron,Rokt33r/electron,gabriel/electron,sircharleswatson/electron,Evercoder/electron,etiktin/electron,beni55/electron,tylergibson/electron,yalexx/electron,astoilkov/electron,noikiy/electron,stevemao/electron,webmechanicx/electron,GoooIce/electron,synaptek/electron,soulteary/electron,michaelchiche/electron,tomashanacek/electron,jonatasfreitasv/electron,jannishuebl/electron,icattlecoder/electron,tylergibson/electron,gbn972/electron,soulteary/electron,gabrielPeart/electron,Gerhut/electron,neutrous/electron,bwiggs/electron,shiftkey/electron,jhen0409/electron,Andrey-Pavlov/electron,brave/electron,joaomoreno/atom-shell,aecca/electron,rsvip/electron,anko/electron,lrlna/electron,fritx/electron,bbondy/electron,oiledCode/electron,tylergibson/electron,jonatasfreitasv/electron,shiftkey/electron,electron/electron,adcentury/electron,bitemyapp/electron,bpasero/electron,neutrous/electron,smczk/electron,rhencke/electron,carsonmcdonald/electron,fabien-d/electron,mubassirhayat/electron,Floato/electron,howmuchcomputer/electron,cos2004/electron,fffej/electron,rajatsingla28/electron,shockone/electron,evgenyzinoviev/electron,tylergibson/electron,shaundunne/electron,renaesop/electron,Ivshti/electron,ervinb/electron,Andrey-Pavlov/electron,jaanus/electron,bitemyapp/electron,leolujuyi/ele
ctron,DivyaKMenon/electron,pandoraui/electron,leftstick/electron,ianscrivener/electron,nicholasess/electron,iftekeriba/electron,beni55/electron,sircharleswatson/electron,kostia/electron,the-ress/electron,pombredanne/electron,leethomas/electron,dkfiresky/electron,Ivshti/electron,lzpfmh/electron,nicholasess/electron,kazupon/electron,sshiting/electron,GoooIce/electron,kostia/electron,bitemyapp/electron,ervinb/electron,jtburke/electron,kazupon/electron,sircharleswatson/electron,howmuchcomputer/electron,mrwizard82d1/electron,RIAEvangelist/electron,gabriel/electron,etiktin/electron,gabrielPeart/electron,edulan/electron,GoooIce/electron,kokdemo/electron,eriser/electron,dahal/electron,dongjoon-hyun/electron,miniak/electron,chriskdon/electron,nekuz0r/electron,RIAEvangelist/electron,evgenyzinoviev/electron,edulan/electron,rprichard/electron,brenca/electron,brave/electron,aaron-goshine/electron,takashi/electron,anko/electron,miniak/electron,aliib/electron,dahal/electron,fireball-x/atom-shell,sky7sea/electron,ianscrivener/electron,bruce/electron,brenca/electron,cqqccqc/electron,zhakui/electron,vipulroxx/electron,MaxWhere/electron,fritx/electron,astoilkov/electron,greyhwndz/electron,miniak/electron,vHanda/electron,saronwei/electron,trigrass2/electron,fffej/electron,kikong/electron,webmechanicx/electron,smczk/electron,d-salas/electron,aaron-goshine/electron,takashi/electron,stevekinney/electron,hokein/atom-shell,natgolov/electron,vipulroxx/electron,lzpfmh/electron,soulteary/electron,fabien-d/electron,smczk/electron,mjaniszew/electron,deepak1556/atom-shell,MaxGraey/electron,the-ress/electron,fritx/electron,kazupon/electron,vHanda/electron,shaundunne/electron,carsonmcdonald/electron,JesselJohn/electron,wolfflow/electron,pombredanne/electron,Andrey-Pavlov/electron,sky7sea/electron,destan/electron,adcentury/electron,destan/electron,thomsonreuters/electron,kokdemo/electron,jjz/electron,wan-qy/electron,aecca/electron,IonicaBizauKitchen/electron,xfstudio/electron,xfstudio/electron,simon
fork/electron,dahal/electron,subblue/electron,synaptek/electron,faizalpribadi/electron,leolujuyi/electron,BionicClick/electron,michaelchiche/electron,mhkeller/electron,John-Lin/electron,kikong/electron,howmuchcomputer/electron,jannishuebl/electron,trigrass2/electron,subblue/electron,dkfiresky/electron,mattotodd/electron,oiledCode/electron,seanchas116/electron,leethomas/electron,twolfson/electron,tinydew4/electron,BionicClick/electron,bruce/electron,rsvip/electron,deed02392/electron,simonfork/electron,mirrh/electron,dahal/electron,eriser/electron,lzpfmh/electron,smczk/electron,kostia/electron,simongregory/electron,aaron-goshine/electron,rajatsingla28/electron,aliib/electron,fomojola/electron,kenmozi/electron,eric-seekas/electron,Neron-X5/electron,MaxGraey/electron,Jacobichou/electron,medixdev/electron,arturts/electron,pombredanne/electron,matiasinsaurralde/electron,kokdemo/electron,rajatsingla28/electron,Jonekee/electron,brave/electron,iftekeriba/electron,michaelchiche/electron,simongregory/electron,rreimann/electron,BionicClick/electron,matiasinsaurralde/electron,pirafrank/electron,Jacobichou/electron,simonfork/electron,jannishuebl/electron,fomojola/electron,tincan24/electron,ankitaggarwal011/electron,posix4e/electron,neutrous/electron,wolfflow/electron,sircharleswatson/electron,setzer777/electron,kokdemo/electron,LadyNaggaga/electron,xfstudio/electron,anko/electron,vaginessa/electron,vaginessa/electron,dongjoon-hyun/electron,jlhbaseball15/electron,leolujuyi/electron,renaesop/electron,fomojola/electron,JussMee15/electron,electron/electron,dkfiresky/electron,vaginessa/electron,nagyistoce/electron-atom-shell,robinvandernoord/electron,RobertJGabriel/electron,leftstick/electron,sky7sea/electron,kazupon/electron,setzer777/electron,aaron-goshine/electron,kcrt/electron,gbn972/electron,the-ress/electron,thomsonreuters/electron,darwin/electron,leftstick/electron,Evercoder/electron,kcrt/electron,vaginessa/electron,gabrielPeart/electron,kcrt/electron,ankitaggarwal011/electron,
aaron-goshine/electron,IonicaBizauKitchen/electron,rsvip/electron,saronwei/electron,deepak1556/atom-shell,GoooIce/electron,JesselJohn/electron,Ivshti/electron,gamedevsam/electron,wolfflow/electron,twolfson/electron,nagyistoce/electron-atom-shell,mrwizard82d1/electron,gerhardberger/electron,systembugtj/electron,robinvandernoord/electron,noikiy/electron,benweissmann/electron,thompsonemerson/electron,thompsonemerson/electron,gabrielPeart/electron,rsvip/electron,robinvandernoord/electron,MaxGraey/electron,LadyNaggaga/electron,simongregory/electron,aichingm/electron,chriskdon/electron,tincan24/electron,aliib/electron,medixdev/electron,timruffles/electron,jonatasfreitasv/electron,jjz/electron,mhkeller/electron,micalan/electron,bobwol/electron,MaxWhere/electron,bbondy/electron,xiruibing/electron,jiaz/electron,abhishekgahlot/electron,abhishekgahlot/electron,dongjoon-hyun/electron,sky7sea/electron,etiktin/electron,dkfiresky/electron,digideskio/electron,eric-seekas/electron,jjz/electron,Jonekee/electron,joaomoreno/atom-shell,xfstudio/electron,meowlab/electron,Ivshti/electron,jacksondc/electron,davazp/electron,bruce/electron,jacksondc/electron,maxogden/atom-shell,nagyistoce/electron-atom-shell,shaundunne/electron,brenca/electron,mattotodd/electron,saronwei/electron,egoist/electron,dongjoon-hyun/electron,brenca/electron,wan-qy/electron,minggo/electron,michaelchiche/electron,timruffles/electron,stevekinney/electron,systembugtj/electron,voidbridge/electron,thingsinjars/electron,Zagorakiss/electron,mubassirhayat/electron,hokein/atom-shell,the-ress/electron,christian-bromann/electron,joaomoreno/atom-shell,aichingm/electron,egoist/electron,brave/electron,sky7sea/electron,mjaniszew/electron,zhakui/electron,thompsonemerson/electron,chrisswk/electron,voidbridge/electron,jonatasfreitasv/electron,gerhardberger/electron,bobwol/electron,meowlab/electron,tinydew4/electron,darwin/electron,kenmozi/electron,synaptek/electron,benweissmann/electron,joneit/electron,aichingm/electron,shockone/elec
tron,gbn972/electron,JussMee15/electron,icattlecoder/electron,rreimann/electron,SufianHassan/electron,nicobot/electron,carsonmcdonald/electron,baiwyc119/electron,adamjgray/electron,joneit/electron,micalan/electron,chriskdon/electron,kostia/electron,matiasinsaurralde/electron,Evercoder/electron,ianscrivener/electron,webmechanicx/electron,DivyaKMenon/electron,digideskio/electron,RIAEvangelist/electron,mrwizard82d1/electron,jacksondc/electron,Zagorakiss/electron,hokein/atom-shell,twolfson/electron,chriskdon/electron,fireball-x/atom-shell,aecca/electron,trankmichael/electron,shaundunne/electron,minggo/electron,farmisen/electron,Neron-X5/electron,kazupon/electron,nicholasess/electron,biblerule/UMCTelnetHub,gbn972/electron,medixdev/electron,jiaz/electron,farmisen/electron,bruce/electron,preco21/electron,arturts/electron,jsutcodes/electron,MaxGraey/electron,baiwyc119/electron,sshiting/electron,renaesop/electron,seanchas116/electron,jtburke/electron,Jacobichou/electron,John-Lin/electron,maxogden/atom-shell,rhencke/electron,fffej/electron,Jonekee/electron,pandoraui/electron,tinydew4/electron,xiruibing/electron,eric-seekas/electron,renaesop/electron,bright-sparks/electron,wolfflow/electron,tonyganch/electron,robinvandernoord/electron,evgenyzinoviev/electron,coderhaoxin/electron,deed02392/electron,mattotodd/electron,leolujuyi/electron,fffej/electron,Neron-X5/electron,stevekinney/electron,Jacobichou/electron,sircharleswatson/electron,tincan24/electron,jacksondc/electron,beni55/electron,adcentury/electron,jacksondc/electron,the-ress/electron,d-salas/electron,iftekeriba/electron,John-Lin/electron,bright-sparks/electron,mjaniszew/electron,brave/muon,yalexx/electron,takashi/electron,fabien-d/electron,takashi/electron,medixdev/electron,shiftkey/electron,kostia/electron,stevemao/electron,darwin/electron,SufianHassan/electron,mhkeller/electron,gabriel/electron,mjaniszew/electron,edulan/electron,davazp/electron,gstack/infinium-shell,electron/electron,minggo/electron,bwiggs/electron,mat
totodd/electron,jaanus/electron,mrwizard82d1/electron,cos2004/electron,trankmichael/electron,gabrielPeart/electron,matiasinsaurralde/electron,seanchas116/electron,deed02392/electron,trigrass2/electron,Gerhut/electron,jtburke/electron,saronwei/electron,mattdesl/electron,GoooIce/electron,JesselJohn/electron,trankmichael/electron,nicobot/electron,cqqccqc/electron,felixrieseberg/electron,arusakov/electron,John-Lin/electron,vHanda/electron,farmisen/electron,tomashanacek/electron,yan-foto/electron,darwin/electron,ianscrivener/electron,rprichard/electron,hokein/atom-shell,felixrieseberg/electron,gamedevsam/electron,Neron-X5/electron,natgolov/electron,wolfflow/electron,electron/electron,chrisswk/electron,systembugtj/electron,rhencke/electron,faizalpribadi/electron,jaanus/electron,simongregory/electron,yan-foto/electron,brave/muon,felixrieseberg/electron,neutrous/electron,leethomas/electron,brave/muon,oiledCode/electron,deepak1556/atom-shell,LadyNaggaga/electron,robinvandernoord/electron,jjz/electron,stevekinney/electron,beni55/electron,jiaz/electron,DivyaKMenon/electron,bright-sparks/electron,bbondy/electron,preco21/electron,edulan/electron,twolfson/electron,rajatsingla28/electron,webmechanicx/electron,jcblw/electron,kenmozi/electron,tomashanacek/electron,gerhardberger/electron,electron/electron,RIAEvangelist/electron,Jacobichou/electron,Evercoder/electron,jsutcodes/electron,leethomas/electron,zhakui/electron,yan-foto/electron,rreimann/electron,miniak/electron,eriser/electron,eriser/electron,felixrieseberg/electron,Rokt33r/electron,yalexx/electron,jhen0409/electron,xfstudio/electron,natgolov/electron,dkfiresky/electron,matiasinsaurralde/electron,lrlna/electron,mirrh/electron,gerhardberger/electron,rprichard/electron,BionicClick/electron,seanchas116/electron,Gerhut/electron,JussMee15/electron,rhencke/electron,faizalpribadi/electron,greyhwndz/electron,LadyNaggaga/electron,faizalpribadi/electron,yalexx/electron,ianscrivener/electron,bpasero/electron,icattlecoder/electron,bruce
/electron,ankitaggarwal011/electron,aecca/electron,LadyNaggaga/electron,joneit/electron,voidbridge/electron,gstack/infinium-shell,mattotodd/electron,the-ress/electron,shaundunne/electron,DivyaKMenon/electron,bright-sparks/electron,kenmozi/electron,natgolov/electron,micalan/electron,micalan/electron,mubassirhayat/electron,jonatasfreitasv/electron,felixrieseberg/electron,mirrh/electron,adamjgray/electron,cos2004/electron,natgolov/electron,Gerhut/electron,deed02392/electron,joaomoreno/atom-shell,Zagorakiss/electron,farmisen/electron,bbondy/electron,Jacobichou/electron,thingsinjars/electron,arusakov/electron,subblue/electron,electron/electron,aaron-goshine/electron,xiruibing/electron,zhakui/electron,Faiz7412/electron,roadev/electron,jaanus/electron,Floato/electron,jaanus/electron,medixdev/electron,rreimann/electron,cqqccqc/electron,IonicaBizauKitchen/electron,RobertJGabriel/electron,kikong/electron,gerhardberger/electron,christian-bromann/electron,aliib/electron,egoist/electron,greyhwndz/electron,thomsonreuters/electron,robinvandernoord/electron,nicobot/electron,deed02392/electron,twolfson/electron,aichingm/electron,etiktin/electron,simonfork/electron,fffej/electron,Zagorakiss/electron,jsutcodes/electron,pombredanne/electron,SufianHassan/electron,tonyganch/electron,systembugtj/electron,voidbridge/electron,baiwyc119/electron,meowlab/electron,xfstudio/electron,pandoraui/electron,systembugtj/electron,trankmichael/electron,timruffles/electron,mattdesl/electron,shockone/electron,kazupon/electron,etiktin/electron,anko/electron,eriser/electron,jlhbaseball15/electron,takashi/electron,pirafrank/electron,egoist/electron,seanchas116/electron,minggo/electron,John-Lin/electron,Zagorakiss/electron,fritx/electron,sshiting/electron,kostia/electron,kenmozi/electron,bpasero/electron,cqqccqc/electron,trigrass2/electron,destan/electron,timruffles/electron,trankmichael/electron,nicholasess/electron,kcrt/electron,d-salas/electron,jsutcodes/electron,destan/electron,wan-qy/electron,tomashanace
k/electron,cos2004/electron,fabien-d/electron,mjaniszew/electron,mirrh/electron,pandoraui/electron,nekuz0r/electron,roadev/electron,arusakov/electron,thingsinjars/electron,ervinb/electron,biblerule/UMCTelnetHub,bwiggs/electron,miniak/electron,kcrt/electron,gabriel/electron,davazp/electron,adamjgray/electron,icattlecoder/electron,bobwol/electron,smczk/electron,BionicClick/electron,eric-seekas/electron,beni55/electron,minggo/electron,noikiy/electron,noikiy/electron,biblerule/UMCTelnetHub,brenca/electron,sircharleswatson/electron,arturts/electron,jhen0409/electron,brave/electron,iftekeriba/electron,synaptek/electron,gabriel/electron,shennushi/electron,eriser/electron,thingsinjars/electron,arusakov/electron,synaptek/electron,twolfson/electron,rreimann/electron,JesselJohn/electron,mirrh/electron,soulteary/electron,tinydew4/electron,tylergibson/electron,JesselJohn/electron,arturts/electron,timruffles/electron,pirafrank/electron,shennushi/electron,natgolov/electron,rajatsingla28/electron,hokein/atom-shell,subblue/electron,gamedevsam/electron,oiledCode/electron,joaomoreno/atom-shell,chriskdon/electron,posix4e/electron,jsutcodes/electron,greyhwndz/electron,stevekinney/electron,kenmozi/electron,jhen0409/electron,stevekinney/electron,rajatsingla28/electron,deed02392/electron,astoilkov/electron,Ivshti/electron,systembugtj/electron,stevemao/electron,maxogden/atom-shell,tonyganch/electron,vaginessa/electron,oiledCode/electron,thomsonreuters/electron,destan/electron,mjaniszew/electron,joneit/electron,lzpfmh/electron,baiwyc119/electron,Evercoder/electron,MaxWhere/electron,evgenyzinoviev/electron,rhencke/electron,Jonekee/electron,fireball-x/atom-shell,roadev/electron,JesselJohn/electron,jlhbaseball15/electron,kcrt/electron,aecca/electron,Andrey-Pavlov/electron,saronwei/electron,michaelchiche/electron,christian-bromann/electron,Jonekee/electron,carsonmcdonald/electron,digideskio/electron,Floato/electron,fabien-d/electron,anko/electron,yan-foto/electron,nagyistoce/electron-atom-shell,
aichingm/electron,adcentury/electron,leolujuyi/electron,howmuchcomputer/electron,cqqccqc/electron,SufianHassan/electron,medixdev/electron,ankitaggarwal011/electron,jacksondc/electron,Gerhut/electron,roadev/electron,mubassirhayat/electron,destan/electron,fireball-x/atom-shell,bitemyapp/electron,joneit/electron,simongregory/electron,wan-qy/electron,voidbridge/electron,aliib/electron,trigrass2/electron,mattotodd/electron,arturts/electron,IonicaBizauKitchen/electron,mattdesl/electron,jiaz/electron,MaxWhere/electron,nagyistoce/electron-atom-shell,bbondy/electron,faizalpribadi/electron,jlhbaseball15/electron,trankmichael/electron,fritx/electron,shiftkey/electron,brenca/electron,Neron-X5/electron,fomojola/electron,coderhaoxin/electron,dongjoon-hyun/electron,stevemao/electron,gbn972/electron,sshiting/electron,shennushi/electron,farmisen/electron,bpasero/electron,seanchas116/electron,arturts/electron,biblerule/UMCTelnetHub,Faiz7412/electron,benweissmann/electron,coderhaoxin/electron,jtburke/electron,jlhbaseball15/electron,chrisswk/electron,jcblw/electron,posix4e/electron,nekuz0r/electron,pombredanne/electron,dongjoon-hyun/electron,setzer777/electron,mrwizard82d1/electron,lzpfmh/electron,egoist/electron,IonicaBizauKitchen/electron,eric-seekas/electron,leolujuyi/electron,pombredanne/electron,tonyganch/electron,vipulroxx/electron,etiktin/electron,jcblw/electron,RIAEvangelist/electron,adamjgray/electron,egoist/electron,setzer777/electron,tincan24/electron,coderhaoxin/electron,leftstick/electron,kikong/electron,gerhardberger/electron,deepak1556/atom-shell,adcentury/electron,jannishuebl/electron,thompsonemerson/electron,arusakov/electron,felixrieseberg/electron,mhkeller/electron,nicholasess/electron,Rokt33r/electron,evgenyzinoviev/electron,setzer777/electron,renaesop/electron,chrisswk/electron,Floato/electron,soulteary/electron,electron/electron,jjz/electron,MaxWhere/electron,oiledCode/electron,chriskdon/electron,arusakov/electron,fomojola/electron,bright-sparks/electron,LadyNagga
ga/electron,yan-foto/electron,jhen0409/electron,tincan24/electron,smczk/electron,thingsinjars/electron,dahal/electron,baiwyc119/electron,John-Lin/electron,greyhwndz/electron,mhkeller/electron,adamjgray/electron,abhishekgahlot/electron,IonicaBizauKitchen/electron,DivyaKMenon/electron,wan-qy/electron,jtburke/electron,synaptek/electron,jlord/electron,jtburke/electron,gstack/infinium-shell,mattdesl/electron,yalexx/electron,benweissmann/electron,Faiz7412/electron,yan-foto/electron,coderhaoxin/electron,noikiy/electron,neutrous/electron,miniak/electron,JussMee15/electron,coderhaoxin/electron,jiaz/electron,cos2004/electron,Faiz7412/electron,abhishekgahlot/electron,nekuz0r/electron,gamedevsam/electron,bobwol/electron,shockone/electron,Evercoder/electron,pandoraui/electron,dahal/electron,jcblw/electron,d-salas/electron,Gerhut/electron,xiruibing/electron,jlord/electron,tinydew4/electron,joneit/electron,meowlab/electron,JussMee15/electron,howmuchcomputer/electron,renaesop/electron,kikong/electron,shennushi/electron,thompsonemerson/electron,adcentury/electron,brave/muon,nicobot/electron,leftstick/electron,meowlab/electron,lrlna/electron,edulan/electron,faizalpribadi/electron,joaomoreno/atom-shell,simonfork/electron,RobertJGabriel/electron,xiruibing/electron,ianscrivener/electron,the-ress/electron,shockone/electron,christian-bromann/electron,Jonekee/electron,bpasero/electron,roadev/electron,simongregory/electron,eric-seekas/electron,neutrous/electron,davazp/electron,nekuz0r/electron,micalan/electron,shockone/electron,vipulroxx/electron,brave/electron,mrwizard82d1/electron,christian-bromann/electron,subblue/electron,preco21/electron,carsonmcdonald/electron,leethomas/electron,anko/electron,xiruibing/electron,d-salas/electron,matiasinsaurralde/electron,stevemao/electron,tomashanacek/electron,thompsonemerson/electron,bbondy/electron,Neron-X5/electron,evgenyzinoviev/electron,gabriel/electron,benweissmann/electron,adamjgray/electron,cos2004/electron,thingsinjars/electron,bwiggs/electro
n,pirafrank/electron,ervinb/electron,bwiggs/electron,JussMee15/electron,astoilkov/electron,SufianHassan/electron,saronwei/electron,roadev/electron,posix4e/electron,tylergibson/electron,vHanda/electron,gstack/infinium-shell,MaxWhere/electron,nicholasess/electron,Rokt33r/electron,brave/muon,fritx/electron,DivyaKMenon/electron,cqqccqc/electron,digideskio/electron,SufianHassan/electron,tinydew4/electron,RobertJGabriel/electron,jcblw/electron,lrlna/electron,rsvip/electron,rhencke/electron,edulan/electron,gamedevsam/electron,thomsonreuters/electron,abhishekgahlot/electron,meowlab/electron,deepak1556/atom-shell,shiftkey/electron,maxogden/atom-shell,Rokt33r/electron,biblerule/UMCTelnetHub,christian-bromann/electron,jannishuebl/electron,carsonmcdonald/electron,howmuchcomputer/electron,kokdemo/electron,farmisen/electron,abhishekgahlot/electron,yalexx/electron,gerhardberger/electron,wolfflow/electron,iftekeriba/electron,vipulroxx/electron,ankitaggarwal011/electron,Floato/electron,dkfiresky/electron,preco21/electron,jiaz/electron,brave/muon,bwiggs/electron,fffej/electron,RobertJGabriel/electron,jaanus/electron,jlord/electron,preco21/electron,RobertJGabriel/electron,digideskio/electron,jlhbaseball15/electron,Andrey-Pavlov/electron,vipulroxx/electron,fomojola/electron,gabrielPeart/electron,digideskio/electron,bitemyapp/electron,ankitaggarwal011/electron,lrlna/electron,mattdesl/electron,shiftkey/electron,bright-sparks/electron,benweissmann/electron,jjz/electron,jlord/electron,biblerule/UMCTelnetHub,RIAEvangelist/electron,lrlna/electron,trigrass2/electron,posix4e/electron,chrisswk/electron,zhakui/electron,ervinb/electron,gbn972/electron,mirrh/electron,d-salas/electron,shaundunne/electron,webmechanicx/electron,nekuz0r/electron,pirafrank/electron,lzpfmh/electron,posix4e/electron,minggo/electron,preco21/electron,setzer777/electron,davazp/electron,simonfork/electron,kokdemo/electron,zhakui/electron,aecca/electron,mubassirhayat/electron,davazp/electron,tincan24/electron,beni55/electron,
mattdesl/electron,tomashanacek/electron,sshiting/electron,iftekeriba/electron,jannishuebl/electron,nicobot/electron,bitemyapp/electron,sky7sea/electron,stevemao/electron,baiwyc119/electron,tonyganch/electron,bpasero/electron,astoilkov/electron,jonatasfreitasv/electron,leethomas/electron,rprichard/electron,astoilkov/electron,thomsonreuters/electron,Floato/electron,pirafrank/electron,gstack/infinium-shell,Andrey-Pavlov/electron,micalan/electron,Zagorakiss/electron,greyhwndz/electron,shennushi/electron,vHanda/electron,aichingm/electron | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
Upgrade libchromiumcontent for gin headers. | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2cf80c1743e370c12eb7bf078eb425f3cc355383'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| <commit_before>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
<commit_msg>Upgrade libchromiumcontent for gin headers.<commit_after> | #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2cf80c1743e370c12eb7bf078eb425f3cc355383'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
Upgrade libchromiumcontent for gin headers.#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2cf80c1743e370c12eb7bf078eb425f3cc355383'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| <commit_before>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'ea1a7e85a3de1878e5656110c76f4d2d8af41c6e'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
<commit_msg>Upgrade libchromiumcontent for gin headers.<commit_after>#!/usr/bin/env python
import platform
import sys
NODE_VERSION = 'v0.11.13'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2cf80c1743e370c12eb7bf078eb425f3cc355383'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
|
e3079cdf31f6fb13ca9d91de313301c8a76d3cd8 | backend/unichat/models/user.py | backend/unichat/models/user.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=100,
db_index=True,
help_text=("The user's active cookie.")
)
| Decrease cookie length in User model from 255 to 100 chars | Decrease cookie length in User model from 255 to 100 chars
| Python | mit | dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
Decrease cookie length in User model from 255 to 100 chars | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=100,
db_index=True,
help_text=("The user's active cookie.")
)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
<commit_msg>Decrease cookie length in User model from 255 to 100 chars<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=100,
db_index=True,
help_text=("The user's active cookie.")
)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
Decrease cookie length in User model from 255 to 100 chars# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=100,
db_index=True,
help_text=("The user's active cookie.")
)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=255,
db_index=True,
help_text=("The user's active cookie.")
)
<commit_msg>Decrease cookie length in User model from 255 to 100 chars<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
class User(models.Model):
MALE = -1
UNDEFINED = 0
FEMALE = 1
GENDER_CHOICES = (
(MALE, 'Male'),
(UNDEFINED, 'Undefined'),
(FEMALE, 'Female')
)
school = models.ForeignKey('unichat.School')
email = models.EmailField(
unique=True,
help_text=("The user's academic email.")
)
password = models.CharField(
max_length=100,
help_text=("The user's password.")
)
gender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The user's gender, by default UNDEFINED, unless otherwise "
"explicitly specified by the user.")
)
interestedInGender = models.IntegerField(
default=0,
choices=GENDER_CHOICES,
help_text=("The gender that the user is interested in talking to, by "
"default UNDEFINED.")
)
interestedInSchools = models.ManyToManyField('unichat.School', related_name='user_interested_schools')
cookie = models.CharField(
default='',
max_length=100,
db_index=True,
help_text=("The user's active cookie.")
)
|
4953021eedbd73dc3d66455c5dff352a852d6474 | test/test_integration.py | test/test_integration.py | import unittest
import http.client
class TestStringMethods(unittest.TestCase):
    """Integration tests for the request router (localhost:8666) and its
    configuration endpoint (localhost:8888).

    Both services must already be running locally; the tests talk to them
    over plain HTTP.
    """

    def test_404NoConfig(self):
        """A route that has not been configured must answer 404."""
        # FIX: the original also opened a connection to the config port here
        # but never used it -- dropped to avoid a pointless socket.
        connRouter = http.client.HTTPConnection("localhost", 8666)
        # addCleanup runs even when an assertion fails, so the socket is not
        # leaked (the original trailing close() calls were skipped on failure).
        self.addCleanup(connRouter.close)
        connRouter.request("GET", "/google")
        response = connRouter.getresponse()
        self.assertEqual(response.status, 404)

    def test_200NoConfig(self):
        """After configuring /google, the router must proxy it with 200."""
        connRouter = http.client.HTTPConnection("localhost", 8666)
        connConfig = http.client.HTTPConnection("localhost", 8888)
        self.addCleanup(connRouter.close)
        self.addCleanup(connConfig.close)
        connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
        response = connConfig.getresponse()
        self.assertEqual(response.status, 200)
        connRouter.request("GET", "/google")
        response = connRouter.getresponse()
        self.assertEqual(response.status, 200)
# Allow running the integration suite directly: ``python test_integration.py``.
if __name__ == '__main__':
    unittest.main()
| import unittest
import http.client
class TestStringMethods(unittest.TestCase):
    """Integration tests for the request router (localhost:8666) and its
    configuration endpoint (localhost:8888); both must be running locally.
    """

    def test_404NoConfig(self):
        """A route that has not been configured must answer 404."""
        connRouter = http.client.HTTPConnection("localhost", 8666)
        connRouter.request("GET", "/google")
        response = connRouter.getresponse()
        self.assertEqual(response.status, 404)
        connRouter.close()
        # BUG FIX: the original called ``connConfig.close()`` here, but
        # ``connConfig`` is never defined in this test -- it raised
        # NameError on every run.

    def test_200NoConfig(self):
        """After configuring /google, the router must proxy it with 200."""
        connRouter = http.client.HTTPConnection("localhost", 8666)
        connConfig = http.client.HTTPConnection("localhost", 8888)
        connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
        response = connConfig.getresponse()
        # BUG FIX: ``read()`` returns bytes; ``"Body:" + bytes`` raised
        # TypeError under Python 3, so decode before concatenating.
        print("Body:" + response.read().decode("utf-8", "replace"))
        self.assertEqual(response.status, 200)
        connRouter.request("GET", "/google")
        response = connRouter.getresponse()
        self.assertEqual(response.status, 200)
        connRouter.close()
        connConfig.close()
# Allow running the integration suite directly: ``python test_integration.py``.
if __name__ == '__main__':
    unittest.main()
| Add debug info to the test | Add debug info to the test | Python | apache-2.0 | dhiaayachi/dynx,dhiaayachi/dynx | import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
Add debug info to the test | import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:" + response.read())
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Add debug info to the test<commit_after> | import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:" + response.read())
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
| import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
Add debug info to the testimport unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:" + response.read())
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
<commit_msg>Add debug info to the test<commit_after>import unittest
import http.client
class TestStringMethods(unittest.TestCase):
def test_404NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 404)
connRouter.close()
connConfig.close()
def test_200NoConfig(self):
connRouter = http.client.HTTPConnection("localhost", 8666)
connConfig = http.client.HTTPConnection("localhost", 8888)
connConfig.request("GET","/configure?location=/google&upstream=http://www.google.com&ttl=10")
response = connConfig.getresponse()
print("Body:" + response.read())
self.assertEqual(response.status, 200)
connRouter.request("GET", "/google")
response = connRouter.getresponse()
self.assertEqual(response.status, 200)
connRouter.close()
connConfig.close()
if __name__ == '__main__':
unittest.main()
|
0ec2c192a3f8428bb487add6a70aef100f02c036 | segpy/portability.py | segpy/portability.py | import os
import sys
# Python 2/3 compatibility shims for segpy: each block below picks the
# spelling appropriate to the running interpreter at import time.
# The empty byte string is ``bytes`` on Python 3 but the plain
# (byte-oriented) ``str`` on Python 2.
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
# Alias for the unbounded integer type: Python 3's ``int`` covers it,
# while Python 2 still distinguishes ``long``.
if sys.version_info >= (3, 0):
    long_int = int
else:
    long_int = long
# byte_string(integers): build a byte string from an iterable of small ints.
# Python 2 has no ``bytes(iterable)`` constructor, hence the join fallback.
if sys.version_info >= (3, 0):
    def byte_string(integers):
        return bytes(integers)
else:
    def byte_string(integers):
        return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
# The size-limited ``repr`` helper module was renamed ``reprlib`` in Python 3.
if sys.version_info >= (3, 0):
    import reprlib
    reprlib = reprlib  # Keep the static analyzer happy
else:
    import repr as reprlib
# Lazy zip: the builtin ``zip`` is already lazy on Python 3; Python 2 needs
# the itertools variants.
if sys.version_info >= (3, 0):
    izip = zip
    from itertools import zip_longest as izip_longest
else:
    from itertools import (izip, izip_longest)
    izip = izip  # Keep the static analyzer happy
    izip_longest = izip_longest  # Keep the static analyzer happy
# four_bytes(byte_str): return the first four bytes as a 4-tuple of ints.
# Indexing bytes yields ints on Python 3 but 1-char strings on Python 2,
# hence the explicit ``ord`` calls in the fallback.
if sys.version_info >= (3, 0):
    def four_bytes(byte_str):
        a, b, c, d = byte_str[:4]
        return a, b, c, d
else:
    def four_bytes(byte_str):
        a = ord(byte_str[0])
        b = ord(byte_str[1])
        c = ord(byte_str[2])
        d = ord(byte_str[3])
        return a, b, c, d
# ``unicode`` disappeared in Python 3; alias it to ``str`` so client code
# can use a single name for text on both versions.
if sys.version_info >= (3, 0):
    unicode = str
else:
    unicode = unicode
| import os
import sys
# Python 2/3 compatibility shims for segpy: each block below picks the
# spelling appropriate to the running interpreter at import time.
# The empty byte string is ``bytes`` on Python 3 but the plain
# (byte-oriented) ``str`` on Python 2.
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
# byte_string(integers): build a byte string from an iterable of small ints.
# Python 2 has no ``bytes(iterable)`` constructor, hence the join fallback.
if sys.version_info >= (3, 0):
    def byte_string(integers):
        return bytes(integers)
else:
    def byte_string(integers):
        return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
# The size-limited ``repr`` helper module was renamed ``reprlib`` in Python 3.
if sys.version_info >= (3, 0):
    import reprlib
    reprlib = reprlib  # Keep the static analyzer happy
else:
    import repr as reprlib
# Lazy zip: the builtin ``zip`` is already lazy on Python 3; Python 2 needs
# the itertools variants.
if sys.version_info >= (3, 0):
    izip = zip
    from itertools import zip_longest as izip_longest
else:
    from itertools import (izip, izip_longest)
    izip = izip  # Keep the static analyzer happy
    izip_longest = izip_longest  # Keep the static analyzer happy
# four_bytes(byte_str): return the first four bytes as a 4-tuple of ints.
# Indexing bytes yields ints on Python 3 but 1-char strings on Python 2,
# hence the explicit ``ord`` calls in the fallback.
if sys.version_info >= (3, 0):
    def four_bytes(byte_str):
        a, b, c, d = byte_str[:4]
        return a, b, c, d
else:
    def four_bytes(byte_str):
        a = ord(byte_str[0])
        b = ord(byte_str[1])
        c = ord(byte_str[2])
        d = ord(byte_str[3])
        return a, b, c, d
# ``unicode`` disappeared in Python 3; alias it to ``str`` so client code
# can use a single name for text on both versions.
if sys.version_info >= (3, 0):
    unicode = str
else:
    unicode = unicode
| Remove Python 2.7 crutch for int/long | Remove Python 2.7 crutch for int/long
| Python | agpl-3.0 | hohogpb/segpy,abingham/segpy,kjellkongsvik/segpy,Kramer477/segpy,kwinkunks/segpy,stevejpurves/segpy,asbjorn/segpy | import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
Remove Python 2.7 crutch for int/long | import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
| <commit_before>import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
<commit_msg>Remove Python 2.7 crutch for int/long<commit_after> | import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
| import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
Remove Python 2.7 crutch for int/longimport os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
| <commit_before>import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
long_int = int
else:
long_int = long
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
<commit_msg>Remove Python 2.7 crutch for int/long<commit_after>import os
import sys
EMPTY_BYTE_STRING = b'' if sys.version_info >= (3, 0) else ''
if sys.version_info >= (3, 0):
def byte_string(integers):
return bytes(integers)
else:
def byte_string(integers):
return EMPTY_BYTE_STRING.join(chr(i) for i in integers)
if sys.version_info >= (3, 0):
import reprlib
reprlib = reprlib # Keep the static analyzer happy
else:
import repr as reprlib
if sys.version_info >= (3, 0):
izip = zip
from itertools import zip_longest as izip_longest
else:
from itertools import (izip, izip_longest)
izip = izip # Keep the static analyzer happy
izip_longest = izip_longest # Keep the static analyzer happy
if sys.version_info >= (3, 0):
def four_bytes(byte_str):
a, b, c, d = byte_str[:4]
return a, b, c, d
else:
def four_bytes(byte_str):
a = ord(byte_str[0])
b = ord(byte_str[1])
c = ord(byte_str[2])
d = ord(byte_str[3])
return a, b, c, d
if sys.version_info >= (3, 0):
unicode = str
else:
unicode = unicode
|
da86340568ff03c6e612aa68a5cd9f275cbf3375 | coda/coda_replication/factories.py | coda/coda_replication/factories.py | """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
    """Create ``QueueEntry`` fixtures with randomized, plausible field values."""

    class Meta:
        model = models.QueueEntry

    ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
    bytes = fuzzy.FuzzyInteger(100000000)
    files = fuzzy.FuzzyInteger(50, 500)
    url_list = fuzzy.FuzzyText(length=500)
    # Statuses are stored as the strings '1'..'9'.
    status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
    # BUG FIX: ``01``/``06`` are invalid literals on Python 3 (leading-zero
    # ints were removed by PEP 3127) and were merely octal 1/6 on Python 2,
    # so plain decimal month/day values are equivalent and portable.
    harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1))
    harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1))
    queue_position = fuzzy.FuzzyInteger(1, 100)
| """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
    """Create ``QueueEntry`` fixtures with randomized, plausible field values."""

    ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
    bytes = fuzzy.FuzzyInteger(100000000)
    files = fuzzy.FuzzyInteger(50, 500)
    url_list = fuzzy.FuzzyText(length=500)
    # Statuses are stored as the strings '1'..'9'.
    status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
    # BUG FIX: ``01``/``06`` are invalid literals on Python 3 (leading-zero
    # ints were removed by PEP 3127) and were merely octal 1/6 on Python 2,
    # so plain decimal month/day values are equivalent and portable.
    harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1))
    harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1))
    queue_position = fuzzy.FuzzyInteger(1, 100)

    class Meta:
        model = models.QueueEntry
| Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide. | Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide.
| Python | bsd-3-clause | unt-libraries/coda,unt-libraries/coda,unt-libraries/coda,unt-libraries/coda | """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.QueueEntry
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide. | """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
class Meta:
model = models.QueueEntry
| <commit_before>"""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.QueueEntry
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
<commit_msg>Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide.<commit_after> | """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
class Meta:
model = models.QueueEntry
| """
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.QueueEntry
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide."""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
class Meta:
model = models.QueueEntry
| <commit_before>"""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
class Meta:
model = models.QueueEntry
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
<commit_msg>Move the QueueEntryFactory Meta class definition below the attributes per the Django code style guide.<commit_after>"""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime
import factory
from factory import fuzzy
from . import models
class QueueEntryFactory(factory.django.DjangoModelFactory):
ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
bytes = fuzzy.FuzzyInteger(100000000)
files = fuzzy.FuzzyInteger(50, 500)
url_list = fuzzy.FuzzyText(length=500)
status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
queue_position = fuzzy.FuzzyInteger(1, 100)
class Meta:
model = models.QueueEntry
|
72ec6a22f94ca1744d2241202f33c0bc777521ca | supplements/fixtures/factories.py | supplements/fixtures/factories.py | # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
    """Create (and save) an ``Ingredient`` row with default test values.

    FIX: was ``factory.Factory``, which only builds unsaved instances, so
    rows never reached the database and related factories could not
    reference them.
    """
    class Meta:
        model = Ingredient
    name = DEFAULT_INGREDIENT_NAME
    half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
    """Create (and save) a ``Measurement`` unit (e.g. "milligram").

    FIX: was ``factory.Factory``, which only builds unsaved instances, so
    rows never reached the database.
    """
    class Meta:
        model = Measurement
    name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
    """Create an ``IngredientComposition`` whose FK targets are saved rows.

    FIX: was ``factory.Factory``; with unsaved SubFactory targets the
    foreign keys could not point at persisted Ingredient/Measurement rows.
    """
    class Meta:
        model = IngredientComposition
    ingredient = factory.SubFactory(IngredientFactory)
    measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
    """Create a ``Supplement`` and optionally attach ingredient compositions.

    FIX: switched from ``factory.Factory`` to ``DjangoModelFactory`` -- the
    M2M ``add()`` in the hook below requires a saved instance with a primary
    key, which plain ``Factory`` never provides.
    """
    class Meta:
        model = Supplement
    name = DEFAULT_SUPPLEMENT_NAME

    @factory.post_generation
    def ingredient_composition(self, create, extracted, **kwargs):
        """Attach compositions passed as ``ingredient_composition=[...]``."""
        if not create:
            # build() strategy: instance is unsaved, M2M cannot be touched.
            return
        if extracted:
            # A list of compositions was passed in; link each one.
            # (Also dropped the stray debug ``print (group)`` left in here.)
            for group in extracted:
                self.ingredient_composition.add(group)
| # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
    # Creates and saves an Ingredient row with default test values.
    class Meta:
        model = Ingredient
    name = DEFAULT_INGREDIENT_NAME
    half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
    # Creates and saves a Measurement unit (e.g. "milligram").
    class Meta:
        model = Measurement
    name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
    # Ties an Ingredient to a Measurement; each SubFactory creates and saves
    # its target row so the foreign keys resolve.
    class Meta:
        model = IngredientComposition
    ingredient = factory.SubFactory(IngredientFactory)
    measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
    # Creates and saves a Supplement; compositions may be attached by
    # calling SupplementFactory(ingredient_composition=[...]).
    class Meta:
        model = Supplement
    name = DEFAULT_SUPPLEMENT_NAME
    @factory.post_generation
    def ingredient_composition(self, create, extracted, **kwargs):
        # Runs after instance generation; M2M add() needs the saved PK,
        # which is why this factory must use the create strategy.
        if not create:
            # Simple build, do nothing.
            return
        if extracted:
            # A list of groups were passed in, use them
            for group in extracted:
                self.ingredient_composition.add(group)
| Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption | Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption
| Python | mit | jeffshek/betterself,jeffshek/betterself,jeffshek/betterself,jeffshek/betterself | # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.Factory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.Factory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.Factory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.Factory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
print (group)
self.ingredient_composition.add(group)
Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption | # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.ingredient_composition.add(group)
| <commit_before># making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.Factory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.Factory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.Factory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.Factory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
print (group)
self.ingredient_composition.add(group)
<commit_msg>Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption<commit_after> | # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.ingredient_composition.add(group)
| # making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.Factory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.Factory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.Factory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.Factory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
print (group)
self.ingredient_composition.add(group)
Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption# making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.ingredient_composition.add(group)
| <commit_before># making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.Factory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.Factory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.Factory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.Factory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
print (group)
self.ingredient_composition.add(group)
<commit_msg>Swap out native factory.Factory with Django specific factory .... now all factory() calls actually save versus ... before nasty assumption<commit_after># making a bet that factory_boy will pan out as we get more data
import factory
from supplements.models import Ingredient, Measurement, IngredientComposition, Supplement
DEFAULT_INGREDIENT_NAME = 'Leucine'
DEFAULT_INGREDIENT_HL_MINUTE = 50
DEFAULT_MEASUREMENT_NAME = 'milligram'
DEFAULT_MEASUREMENT_SHORT_NAME = 'mg'
DEFAULT_SUPPLEMENT_NAME = 'BCAA'
class IngredientFactory(factory.DjangoModelFactory):
class Meta:
model = Ingredient
name = DEFAULT_INGREDIENT_NAME
half_life_minutes = DEFAULT_INGREDIENT_HL_MINUTE
class MeasurementFactory(factory.DjangoModelFactory):
class Meta:
model = Measurement
name = DEFAULT_MEASUREMENT_NAME
class IngredientCompositionFactory(factory.DjangoModelFactory):
class Meta:
model = IngredientComposition
ingredient = factory.SubFactory(IngredientFactory)
measurement_unit = factory.SubFactory(MeasurementFactory)
class SupplementFactory(factory.DjangoModelFactory):
class Meta:
model = Supplement
name = DEFAULT_SUPPLEMENT_NAME
@factory.post_generation
def ingredient_composition(self, create, extracted, **kwargs):
if not create:
# Simple build, do nothing.
return
if extracted:
# A list of groups were passed in, use them
for group in extracted:
self.ingredient_composition.add(group)
|
3e1f1e515b4392d98fe221ce4c14daefc531a1fe | tests/test_compatibility/tests.py | tests/test_compatibility/tests.py | """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
| """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
class DjangoCompatibility(TestCase):
def test_autofield_compatible(self):
"""Test that the light weigh AutoField is compatible in all Django ver."""
primary_key = [x for x in Lead._meta.fields if x.primary_key][0]
self.assertEqual(primary_key.auto_created, True)
self.assertEqual(primary_key.get_internal_type(), 'AutoField')
self.assertIn(primary_key.name, ('id', 'Id'))
| Test for compatibility of primary key AutoField | Test for compatibility of primary key AutoField
| Python | mit | philchristensen/django-salesforce,hynekcer/django-salesforce,django-salesforce/django-salesforce,chromakey/django-salesforce,philchristensen/django-salesforce,django-salesforce/django-salesforce,hynekcer/django-salesforce,chromakey/django-salesforce,django-salesforce/django-salesforce,hynekcer/django-salesforce,chromakey/django-salesforce,philchristensen/django-salesforce | """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
Test for compatibility of primary key AutoField | """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
class DjangoCompatibility(TestCase):
def test_autofield_compatible(self):
"""Test that the light weigh AutoField is compatible in all Django ver."""
primary_key = [x for x in Lead._meta.fields if x.primary_key][0]
self.assertEqual(primary_key.auto_created, True)
self.assertEqual(primary_key.get_internal_type(), 'AutoField')
self.assertIn(primary_key.name, ('id', 'Id'))
| <commit_before>"""Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
<commit_msg>Test for compatibility of primary key AutoField<commit_after> | """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
class DjangoCompatibility(TestCase):
def test_autofield_compatible(self):
"""Test that the light weigh AutoField is compatible in all Django ver."""
primary_key = [x for x in Lead._meta.fields if x.primary_key][0]
self.assertEqual(primary_key.auto_created, True)
self.assertEqual(primary_key.get_internal_type(), 'AutoField')
self.assertIn(primary_key.name, ('id', 'Id'))
| """Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
Test for compatibility of primary key AutoField"""Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
class DjangoCompatibility(TestCase):
def test_autofield_compatible(self):
"""Test that the light weigh AutoField is compatible in all Django ver."""
primary_key = [x for x in Lead._meta.fields if x.primary_key][0]
self.assertEqual(primary_key.auto_created, True)
self.assertEqual(primary_key.get_internal_type(), 'AutoField')
self.assertIn(primary_key.name, ('id', 'Id'))
| <commit_before>"""Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
<commit_msg>Test for compatibility of primary key AutoField<commit_after>"""Backward compatible behaviour with primary key 'Id'."""
from __future__ import absolute_import
from django.conf import settings
from django.test import TestCase
from salesforce.backend import sf_alias
from tests.test_compatibility.models import Lead, User
current_user = settings.DATABASES[sf_alias]['USER']
class CompatibilityTest(TestCase):
def test_capitalized_id(self):
test_lead = Lead(Company='sf_test lead', LastName='name')
test_lead.save()
try:
refreshed_lead = Lead.objects.get(Id=test_lead.Id)
self.assertEqual(refreshed_lead.Id, test_lead.Id)
self.assertEqual(refreshed_lead.Owner.Username, current_user)
leads = Lead.objects.filter(Company='sf_test lead', LastName='name')
self.assertEqual(len(leads), 1)
repr(test_lead.__dict__)
finally:
test_lead.delete()
class DjangoCompatibility(TestCase):
def test_autofield_compatible(self):
"""Test that the light weigh AutoField is compatible in all Django ver."""
primary_key = [x for x in Lead._meta.fields if x.primary_key][0]
self.assertEqual(primary_key.auto_created, True)
self.assertEqual(primary_key.get_internal_type(), 'AutoField')
self.assertIn(primary_key.name, ('id', 'Id'))
|
20fce7b482fd11a65494014e14aabecbe4e87683 | src/cmt/standard_names/snbuild.py | src/cmt/standard_names/snbuild.py | #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_model_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.unique_operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
| #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
from .io import from_list_file
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_list_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
| Read names line-by-line from a plain text file. | Read names line-by-line from a plain text file.
| Python | mit | csdms/standard_names,csdms/standard_names | #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_model_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.unique_operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
Read names line-by-line from a plain text file. | #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
from .io import from_list_file
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_list_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
| <commit_before>#! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_model_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.unique_operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
<commit_msg>Read names line-by-line from a plain text file.<commit_after> | #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
from .io import from_list_file
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_list_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
| #! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_model_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.unique_operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
Read names line-by-line from a plain text file.#! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
from .io import from_list_file
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_list_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
| <commit_before>#! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_model_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.unique_operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
<commit_msg>Read names line-by-line from a plain text file.<commit_after>#! /usr/bin/env python
"""
Example usage:
snbuild data/models.yaml data/scraped.yaml \
> standard_names/data/standard_names.yaml
"""
import os
from . import (from_model_file, FORMATTERS, Collection)
from .io import from_list_file
def main():
"""
Build a list of CSDMS standard names for YAML description files.
"""
import argparse
parser = argparse.ArgumentParser(
'Scan a model description file for CSDMS standard names')
parser.add_argument('file', nargs='+', type=argparse.FileType('r'),
help='YAML file describing model exchange items')
args = parser.parse_args()
names = Collection()
for model_file in args.file:
names |= from_list_file(model_file)
formatter = FORMATTERS['yaml']
print '%YAML 1.2'
print '---'
print os.linesep.join([
formatter(names.names(), sorted=True, heading='names'),
'---',
formatter(names.objects(), sorted=True, heading='objects'),
'---',
formatter(names.quantities(), sorted=True, heading='quantities'),
'---',
formatter(names.operators(), sorted=True, heading='operators'),
'...',
])
if __name__ == '__main__':
main()
|
4b8fbe2914aec5ddcf7f63c6b7ca2244ec022084 | tests/test_crossbuild.py | tests/test_crossbuild.py | from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
| from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_osx_clientt(self):
with patch('crossbuild.build_osx_client') as mock:
main(['osx-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_agent(self):
with patch('crossbuild.build_win_agent') as mock:
main(['win-agent', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
| Add main osx-client command test. | Add main osx-client command test. | Python | agpl-3.0 | mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju | from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
Add main osx-client command test. | from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_osx_clientt(self):
with patch('crossbuild.build_osx_client') as mock:
main(['osx-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_agent(self):
with patch('crossbuild.build_win_agent') as mock:
main(['win-agent', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
| <commit_before>from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
<commit_msg>Add main osx-client command test.<commit_after> | from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_osx_clientt(self):
with patch('crossbuild.build_osx_client') as mock:
main(['osx-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_agent(self):
with patch('crossbuild.build_win_agent') as mock:
main(['win-agent', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
| from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
Add main osx-client command test.from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_osx_clientt(self):
with patch('crossbuild.build_osx_client') as mock:
main(['osx-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_agent(self):
with patch('crossbuild.build_win_agent') as mock:
main(['win-agent', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
| <commit_before>from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
<commit_msg>Add main osx-client command test.<commit_after>from mock import patch
from unittest import TestCase
from crossbuild import (
main,
)
class CrossBuildTestCase(TestCase):
def test_main_setup(self):
with patch('crossbuild.setup_cross_building') as mock:
main(['-d', '-v', 'setup', '--build-dir', './foo'])
args, kwargs = mock.call_args
self.assertEqual(('./foo', ), args)
self.assertEqual({'dry_run': True, 'verbose': True}, kwargs)
def test_main_osx_clientt(self):
with patch('crossbuild.build_osx_client') as mock:
main(['osx-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_client(self):
with patch('crossbuild.build_win_client') as mock:
main(['win-client', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
def test_main_win_agent(self):
with patch('crossbuild.build_win_agent') as mock:
main(['win-agent', '--build-dir', './foo', 'bar.1.2.3.tar.gz'])
args, kwargs = mock.call_args
self.assertEqual(('bar.1.2.3.tar.gz', './foo'), args)
self.assertEqual({'dry_run': False, 'verbose': False}, kwargs)
|
abdfef81c3146b720c561eaedf8592cd640262a0 | falcom/table.py | falcom/table.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text.rstrip("\n")
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
self.__create_internal_structure()
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.__rows)
def __iter__ (self):
return iter(self.__rows)
def __getitem__ (self, key):
return self.__rows[key]
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
def __create_internal_structure (self):
if self.text:
self.__rows = [tuple(r.split("\t"))
for r in self.text.rstrip("\n").split("\n")]
else:
self.__rows = []
| Split input text on init | Split input text on init
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text.rstrip("\n")
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
Split input text on init | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
self.__create_internal_structure()
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.__rows)
def __iter__ (self):
return iter(self.__rows)
def __getitem__ (self, key):
return self.__rows[key]
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
def __create_internal_structure (self):
if self.text:
self.__rows = [tuple(r.split("\t"))
for r in self.text.rstrip("\n").split("\n")]
else:
self.__rows = []
| <commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text.rstrip("\n")
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
<commit_msg>Split input text on init<commit_after> | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
self.__create_internal_structure()
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.__rows)
def __iter__ (self):
return iter(self.__rows)
def __getitem__ (self, key):
return self.__rows[key]
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
def __create_internal_structure (self):
if self.text:
self.__rows = [tuple(r.split("\t"))
for r in self.text.rstrip("\n").split("\n")]
else:
self.__rows = []
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text.rstrip("\n")
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
Split input text on init# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
self.__create_internal_structure()
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.__rows)
def __iter__ (self):
return iter(self.__rows)
def __getitem__ (self, key):
return self.__rows[key]
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
def __create_internal_structure (self):
if self.text:
self.__rows = [tuple(r.split("\t"))
for r in self.text.rstrip("\n").split("\n")]
else:
self.__rows = []
| <commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text.rstrip("\n")
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
<commit_msg>Split input text on init<commit_after># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
self.text = tab_separated_text
self.__raise_error_if_carriage_returns()
else:
self.text = tab_separated_text
self.__create_internal_structure()
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.__rows)
def __iter__ (self):
return iter(self.__rows)
def __getitem__ (self, key):
return self.__rows[key]
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
def __raise_error_if_carriage_returns (self):
if "\r" in self.text:
raise self.InputStrContainsCarriageReturn
def __create_internal_structure (self):
if self.text:
self.__rows = [tuple(r.split("\t"))
for r in self.text.rstrip("\n").split("\n")]
else:
self.__rows = []
|
4d0e6265911199b1376d0f52e249625180a0500d | third_party/py/gflags/__init__.py | third_party/py/gflags/__init__.py | # gflags raises DuplicateFlagError when defining default flags from packages
# with different names, so this pseudo-package must mimic the core gflags
# package name.
__name__ += ".gflags" # i.e. "third_party.py.gflags.gflags"
from gflags import *
| from __future__ import absolute_import
from gflags import *
| Use PEP 328 absolute import for third_party python gflags. | Use PEP 328 absolute import for third_party python gflags.
Commit d926bc40260549b997a6a5a1e82d9e7999dbb65e fixed a bug (#4206, #4208) in
the third_party python gflags pseudo-package but added excessive runtime
warnings (see #4212). Using the python PEP 328 (absolute import) implementation
eliminates these warnings while properly addressing the original bug.
| Python | apache-2.0 | meteorcloudy/bazel,perezd/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,davidzchen/bazel,akira-baruah/bazel,ulfjack/bazel,twitter-forks/bazel,safarmer/bazel,aehlig/bazel,dslomov/bazel-windows,katre/bazel,bazelbuild/bazel,twitter-forks/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,ButterflyNetwork/bazel,safarmer/bazel,cushon/bazel,cushon/bazel,perezd/bazel,cushon/bazel,akira-baruah/bazel,davidzchen/bazel,ulfjack/bazel,davidzchen/bazel,akira-baruah/bazel,aehlig/bazel,perezd/bazel,katre/bazel,dslomov/bazel-windows,dslomov/bazel-windows,perezd/bazel,dslomov/bazel,aehlig/bazel,aehlig/bazel,ulfjack/bazel,twitter-forks/bazel,aehlig/bazel,twitter-forks/bazel,cushon/bazel,ButterflyNetwork/bazel,safarmer/bazel,meteorcloudy/bazel,dslomov/bazel-windows,werkt/bazel,werkt/bazel,ulfjack/bazel,dslomov/bazel,akira-baruah/bazel,dropbox/bazel,safarmer/bazel,ButterflyNetwork/bazel,dropbox/bazel,dslomov/bazel-windows,perezd/bazel,katre/bazel,meteorcloudy/bazel,dropbox/bazel,ulfjack/bazel,ulfjack/bazel,bazelbuild/bazel,cushon/bazel,meteorcloudy/bazel,aehlig/bazel,ButterflyNetwork/bazel,safarmer/bazel,davidzchen/bazel,katre/bazel,twitter-forks/bazel,bazelbuild/bazel,bazelbuild/bazel,bazelbuild/bazel,dslomov/bazel,dslomov/bazel,dropbox/bazel,werkt/bazel,twitter-forks/bazel,twitter-forks/bazel,dslomov/bazel-windows,aehlig/bazel,werkt/bazel,perezd/bazel,perezd/bazel,katre/bazel,dropbox/bazel,davidzchen/bazel,dslomov/bazel,ulfjack/bazel,meteorcloudy/bazel,katre/bazel,dslomov/bazel,safarmer/bazel,meteorcloudy/bazel,dslomov/bazel,cushon/bazel,werkt/bazel,davidzchen/bazel,werkt/bazel,akira-baruah/bazel,dropbox/bazel,davidzchen/bazel | # gflags raises DuplicateFlagError when defining default flags from packages
# with different names, so this pseudo-package must mimic the core gflags
# package name.
__name__ += ".gflags" # i.e. "third_party.py.gflags.gflags"
from gflags import *
Use PEP 328 absolute import for third_party python gflags.
Commit d926bc40260549b997a6a5a1e82d9e7999dbb65e fixed a bug (#4206, #4208) in
the third_party python gflags pseudo-package but added excessive runtime
warnings (see #4212). Using the python PEP 328 (absolute import) implementation
eliminates these warnings while properly addressing the original bug. | from __future__ import absolute_import
from gflags import *
| <commit_before># gflags raises DuplicateFlagError when defining default flags from packages
# with different names, so this pseudo-package must mimic the core gflags
# package name.
__name__ += ".gflags" # i.e. "third_party.py.gflags.gflags"
from gflags import *
<commit_msg>Use PEP 328 absolute import for third_party python gflags.
Commit d926bc40260549b997a6a5a1e82d9e7999dbb65e fixed a bug (#4206, #4208) in
the third_party python gflags pseudo-package but added excessive runtime
warnings (see #4212). Using the python PEP 328 (absolute import) implementation
eliminates these warnings while properly addressing the original bug.<commit_after> | from __future__ import absolute_import
from gflags import *
| # gflags raises DuplicateFlagError when defining default flags from packages
# with different names, so this pseudo-package must mimic the core gflags
# package name.
__name__ += ".gflags" # i.e. "third_party.py.gflags.gflags"
from gflags import *
Use PEP 328 absolute import for third_party python gflags.
Commit d926bc40260549b997a6a5a1e82d9e7999dbb65e fixed a bug (#4206, #4208) in
the third_party python gflags pseudo-package but added excessive runtime
warnings (see #4212). Using the python PEP 328 (absolute import) implementation
eliminates these warnings while properly addressing the original bug.from __future__ import absolute_import
from gflags import *
| <commit_before># gflags raises DuplicateFlagError when defining default flags from packages
# with different names, so this pseudo-package must mimic the core gflags
# package name.
__name__ += ".gflags" # i.e. "third_party.py.gflags.gflags"
from gflags import *
<commit_msg>Use PEP 328 absolute import for third_party python gflags.
Commit d926bc40260549b997a6a5a1e82d9e7999dbb65e fixed a bug (#4206, #4208) in
the third_party python gflags pseudo-package but added excessive runtime
warnings (see #4212). Using the python PEP 328 (absolute import) implementation
eliminates these warnings while properly addressing the original bug.<commit_after>from __future__ import absolute_import
from gflags import *
|
ab14f4c86fca6daab9d67cc9b4c3581d76d5635a | foster/utils.py | foster/utils.py | import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
| import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
| Fix whitespace in foster/util.py to better comply with PEP8 | Fix whitespace in foster/util.py to better comply with PEP8
| Python | mit | hugollm/foster,hugollm/foster | import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
Fix whitespace in foster/util.py to better comply with PEP8 | import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
| <commit_before>import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
<commit_msg>Fix whitespace in foster/util.py to better comply with PEP8<commit_after> | import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
| import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
Fix whitespace in foster/util.py to better comply with PEP8import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
| <commit_before>import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
<commit_msg>Fix whitespace in foster/util.py to better comply with PEP8<commit_after>import os.path
import shutil
from string import Template
PIKE_DIR = os.path.dirname(__file__)
SAMPLES_DIR = os.path.join(PIKE_DIR, 'samples')
def sample_path(sample):
path = os.path.join(SAMPLES_DIR, sample)
return os.path.realpath(path)
def copy_sample(sample, target):
source = os.path.join(SAMPLES_DIR, sample)
shutil.copy(source, target)
def render_sample(sample, **kwargs):
source = os.path.join(SAMPLES_DIR, sample)
with open(source, 'r') as f:
text = f.read()
template = Template(text)
return template.substitute(kwargs)
|
26ffa0cdd1389e2a364531cd20e9f37ee1565cce | base/view_utils.py | base/view_utils.py | # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
| # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
| Use 'o' as the order by parameter in clean_query_string | Use 'o' as the order by parameter in clean_query_string
| Python | mit | magnet-cl/django-project-template-py3,Angoreher/xcero,Angoreher/xcero,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,Angoreher/xcero,Angoreher/xcero | # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
Use 'o' as the order by parameter in clean_query_string | # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
| <commit_before># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
<commit_msg>Use 'o' as the order by parameter in clean_query_string<commit_after> | # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
| # django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
Use 'o' as the order by parameter in clean_query_string# django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
| <commit_before># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if not k.startswith('o')
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
<commit_msg>Use 'o' as the order by parameter in clean_query_string<commit_after># django
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# standard library
def paginate(request, objects, page_size=25):
paginator = Paginator(objects, page_size)
page = request.GET.get('p')
try:
paginated_objects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
paginated_objects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
paginated_objects = paginator.page(paginator.num_pages)
return paginated_objects
def clean_query_string(request):
clean_query_set = request.GET.copy()
clean_query_set = dict(
(k, v) for k, v in request.GET.items() if k != 'o'
)
try:
del clean_query_set['p']
except:
pass
mstring = []
for key in clean_query_set.keys():
valuelist = request.GET.getlist(key)
mstring.extend(['%s=%s' % (key, val) for val in valuelist])
return '&'.join(mstring)
|
e3505746e0f09c103fd875a24ded85290272cfb9 | django_local_apps/management/commands/docker_exec.py | django_local_apps/management/commands/docker_exec.py | import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"])
Command = DockerExecutor
| import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
| Print execution result for docker. | Print execution result for docker.
| Python | bsd-3-clause | weijia/django-local-apps,weijia/django-local-apps | import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"])
Command = DockerExecutor
Print execution result for docker. | import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
| <commit_before>import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"])
Command = DockerExecutor
<commit_msg>Print execution result for docker.<commit_after> | import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
| import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"])
Command = DockerExecutor
Print execution result for docker.import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
| <commit_before>import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"])
Command = DockerExecutor
<commit_msg>Print execution result for docker.<commit_after>import logging
import docker
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
log = logging.getLogger()
class DockerExecutor(DjangoCmdBase):
def add_arguments(self, parser):
# Positional arguments
"""
:param: in the args it could be: /usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help
NO need to add '"' as "/usr/local/bin/python /home/richard/codes/django-dev-server/manage.py help"
:return:
"""
parser.add_argument('container_id', nargs=1)
parser.add_argument('work_dir', nargs='?', default=None)
parser.add_argument('path_and_params', nargs='+')
def msg_loop(self):
print(self.options["path_and_params"])
client = docker.from_env()
container = client.containers.get(self.options["container_id"][0])
print(container.exec_run(" ".join(self.options["path_and_params"]), workdir=self.options["work_dir"]))
Command = DockerExecutor
|
c83e2383ea38dc8a0b5ce8e24bdfc2e9c2ba62bd | concourse/scripts/build_with_orca.py | concourse/scripts/build_with_orca.py | #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
return subprocess.call(["make",
"-j" + str(num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
ciCommon = GporcaCommon()
return subprocess.call(["make",
"-j" + str(ciCommon.num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
| Fix councourse script for gpdb | Fix councourse script for gpdb
| Python | apache-2.0 | ashwinstar/gpdb,kaknikhil/gpdb,xuegang/gpdb,kaknikhil/gpdb,xinzweb/gpdb,lisakowen/gpdb,ahachete/gpdb,randomtask1155/gpdb,janebeckman/gpdb,CraigHarris/gpdb,kaknikhil/gpdb,Chibin/gpdb,tangp3/gpdb,lpetrov-pivotal/gpdb,janebeckman/gpdb,rvs/gpdb,royc1/gpdb,chrishajas/gpdb,ashwinstar/gpdb,0x0FFF/gpdb,50wu/gpdb,zaksoup/gpdb,greenplum-db/gpdb,randomtask1155/gpdb,cjcjameson/gpdb,Chibin/gpdb,xinzweb/gpdb,rvs/gpdb,adam8157/gpdb,zaksoup/gpdb,rvs/gpdb,kaknikhil/gpdb,chrishajas/gpdb,edespino/gpdb,xuegang/gpdb,yuanzhao/gpdb,rubikloud/gpdb,cjcjameson/gpdb,adam8157/gpdb,CraigHarris/gpdb,greenplum-db/gpdb,chrishajas/gpdb,rubikloud/gpdb,cjcjameson/gpdb,randomtask1155/gpdb,edespino/gpdb,edespino/gpdb,ahachete/gpdb,ahachete/gpdb,greenplum-db/gpdb,edespino/gpdb,cjcjameson/gpdb,Quikling/gpdb,0x0FFF/gpdb,Chibin/gpdb,kaknikhil/gpdb,edespino/gpdb,xinzweb/gpdb,lpetrov-pivotal/gpdb,greenplum-db/gpdb,ahachete/gpdb,cjcjameson/gpdb,chrishajas/gpdb,CraigHarris/gpdb,zaksoup/gpdb,janebeckman/gpdb,lpetrov-pivotal/gpdb,randomtask1155/gpdb,Quikling/gpdb,janebeckman/gpdb,kaknikhil/gpdb,tangp3/gpdb,yuanzhao/gpdb,edespino/gpdb,janebeckman/gpdb,zaksoup/gpdb,xuegang/gpdb,ahachete/gpdb,lpetrov-pivotal/gpdb,chrishajas/gpdb,xinzweb/gpdb,yuanzhao/gpdb,cjcjameson/gpdb,jmcatamney/gpdb,Quikling/gpdb,CraigHarris/gpdb,ahachete/gpdb,lintzc/gpdb,lintzc/gpdb,Chibin/gpdb,Chibin/gpdb,janebeckman/gpdb,xuegang/gpdb,rvs/gpdb,lintzc/gpdb,royc1/gpdb,yuanzhao/gpdb,50wu/gpdb,yuanzhao/gpdb,edespino/gpdb,50wu/gpdb,jmcatamney/gpdb,janebeckman/gpdb,rubikloud/gpdb,kaknikhil/gpdb,jmcatamney/gpdb,randomtask1155/gpdb,yuanzhao/gpdb,cjcjameson/gpdb,chrishajas/gpdb,janebeckman/gpdb,lisakowen/gpdb,0x0FFF/gpdb,xinzweb/gpdb,ashwinstar/gpdb,rvs/gpdb,randomtask1155/gpdb,50wu/gpdb,xinzweb/gpdb,ahachete/gpdb,jmcatamney/gpdb,kaknikhil/gpdb,lisakowen/gpdb,lisakowen/gpdb,tangp3/gpdb,janebeckman/gpdb,lintzc/gpdb,ahachete/gpdb,Chibin/gpdb,cjcjameson/gpdb,lpetrov-pivotal/gpdb,royc1/gpdb,kaknikhil/gpdb,zaksoup/gpdb,rubikloud/gpd
b,Chibin/gpdb,lintzc/gpdb,ashwinstar/gpdb,rvs/gpdb,50wu/gpdb,cjcjameson/gpdb,yuanzhao/gpdb,royc1/gpdb,CraigHarris/gpdb,lisakowen/gpdb,lpetrov-pivotal/gpdb,50wu/gpdb,Quikling/gpdb,randomtask1155/gpdb,tangp3/gpdb,0x0FFF/gpdb,ashwinstar/gpdb,greenplum-db/gpdb,lisakowen/gpdb,royc1/gpdb,adam8157/gpdb,royc1/gpdb,zaksoup/gpdb,yuanzhao/gpdb,greenplum-db/gpdb,tangp3/gpdb,yuanzhao/gpdb,xuegang/gpdb,rubikloud/gpdb,xuegang/gpdb,tangp3/gpdb,randomtask1155/gpdb,rvs/gpdb,ashwinstar/gpdb,royc1/gpdb,jmcatamney/gpdb,Quikling/gpdb,0x0FFF/gpdb,CraigHarris/gpdb,CraigHarris/gpdb,50wu/gpdb,rubikloud/gpdb,adam8157/gpdb,xuegang/gpdb,rubikloud/gpdb,chrishajas/gpdb,adam8157/gpdb,rvs/gpdb,Chibin/gpdb,adam8157/gpdb,kaknikhil/gpdb,zaksoup/gpdb,0x0FFF/gpdb,cjcjameson/gpdb,xuegang/gpdb,ashwinstar/gpdb,lisakowen/gpdb,Chibin/gpdb,lintzc/gpdb,zaksoup/gpdb,xuegang/gpdb,xinzweb/gpdb,Quikling/gpdb,adam8157/gpdb,janebeckman/gpdb,lintzc/gpdb,edespino/gpdb,lintzc/gpdb,adam8157/gpdb,ashwinstar/gpdb,edespino/gpdb,CraigHarris/gpdb,edespino/gpdb,rubikloud/gpdb,lisakowen/gpdb,tangp3/gpdb,CraigHarris/gpdb,jmcatamney/gpdb,50wu/gpdb,Chibin/gpdb,royc1/gpdb,jmcatamney/gpdb,greenplum-db/gpdb,Quikling/gpdb,greenplum-db/gpdb,0x0FFF/gpdb,lintzc/gpdb,0x0FFF/gpdb,xinzweb/gpdb,lpetrov-pivotal/gpdb,Quikling/gpdb,jmcatamney/gpdb,rvs/gpdb,chrishajas/gpdb,tangp3/gpdb,lpetrov-pivotal/gpdb,yuanzhao/gpdb,Quikling/gpdb,rvs/gpdb,Quikling/gpdb | #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
return subprocess.call(["make",
"-j" + str(num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
Fix councourse script for gpdb | #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
ciCommon = GporcaCommon()
return subprocess.call(["make",
"-j" + str(ciCommon.num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
return subprocess.call(["make",
"-j" + str(num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Fix councourse script for gpdb<commit_after> | #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
ciCommon = GporcaCommon()
return subprocess.call(["make",
"-j" + str(ciCommon.num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
| #!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
return subprocess.call(["make",
"-j" + str(num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
Fix councourse script for gpdb#!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
ciCommon = GporcaCommon()
return subprocess.call(["make",
"-j" + str(ciCommon.num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
| <commit_before>#!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
return subprocess.call(["make",
"-j" + str(num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Fix councourse script for gpdb<commit_after>#!/usr/bin/python2
import optparse
import subprocess
import sys
from gporca import GporcaCommon
def make():
ciCommon = GporcaCommon()
return subprocess.call(["make",
"-j" + str(ciCommon.num_cpus())], cwd="gpdb_src")
def install(output_dir):
subprocess.call(["make", "install"], cwd="gpdb_src")
subprocess.call("mkdir -p " + output_dir, shell=True)
return subprocess.call("cp -r /usr/local/gpdb/* " + output_dir, shell=True)
def main():
parser = optparse.OptionParser()
parser.add_option("--build_type", dest="build_type", default="RELEASE")
parser.add_option("--compiler", dest="compiler")
parser.add_option("--cxxflags", dest="cxxflags")
parser.add_option("--output_dir", dest="output_dir", default="install")
(options, args) = parser.parse_args()
ciCommon = GporcaCommon()
status = ciCommon.install_system_deps()
if status:
return status
for dependency in args:
status = ciCommon.install_dependency(dependency)
if status:
return status
status = ciCommon.configure()
if status:
return status
status = make()
if status:
return status
status = install(options.output_dir)
if status:
return status
return 0
if __name__ == "__main__":
sys.exit(main())
|
d3bcd6426bc323a876ffab6ac46fe117f9e5ab13 | opps/__init__.py | opps/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
| Remove django installed apps init opps | Remove django installed apps init opps
| Python | mit | YACOWS/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
Remove django installed apps init opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
<commit_msg>Remove django installed apps init opps<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
Remove django installed apps init opps#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
settings.INSTALLED_APPS += ('opps.article',
'opps.image',
'opps.channel',
'opps.source',
'redactor',
'tagging',)
settings.REDACTOR_OPTIONS = {'lang': 'en'}
settings.REDACTOR_UPLOAD = 'uploads/'
<commit_msg>Remove django installed apps init opps<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 2)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Opps CMS websites magazines and high-traffic"
__author__ = u"Thiago Avelino"
__credits__ = []
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"BSD"
__copyright__ = u"Copyright 2013, YACOWS"
|
cf5b3e76f89e2430fa482a1fb4a163e6b367928f | opps/__init__.py | opps/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 0)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| Set new developer version 0.2.1 | Set new developer version 0.2.1
| Python | mit | williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,jeanmask/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 0)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
Set new developer version 0.2.1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 0)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
<commit_msg>Set new developer version 0.2.1<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 0)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
Set new developer version 0.2.1#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 0)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
<commit_msg>Set new developer version 0.2.1<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
VERSION = (0, 2, 1)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Open Source Content Management Platform - CMS for the "
u"magazines, newspappers websites and portals with "
u"high-traffic, using the Django Framework."
__author__ = u"Thiago Avelino"
__credits__ = ['Bruno Rocha']
__email__ = u"opps-developers@googlegroups.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Project"
|
2a80bcdc9fd7ad85888ac9edf53ece8d784db632 | c10kdemo/settings.py | c10kdemo/settings.py | # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
| # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'disable_existing_loggers': False,
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
| Make it easier to debug with logging. | Make it easier to debug with logging.
| Python | bsd-3-clause | aaugustin/django-c10k-demo,gogobook/django-c10k-demo,phamvanhung2e123/django-c10k-demo,phamvanhung2e123/django-c10k-demo,gogobook/django-c10k-demo,aaugustin/django-c10k-demo,aaugustin/django-c10k-demo,gogobook/django-c10k-demo,phamvanhung2e123/django-c10k-demo | # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
Make it easier to debug with logging. | # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'disable_existing_loggers': False,
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
| <commit_before># Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
<commit_msg>Make it easier to debug with logging.<commit_after> | # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'disable_existing_loggers': False,
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
| # Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
Make it easier to debug with logging.# Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'disable_existing_loggers': False,
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
| <commit_before># Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
<commit_msg>Make it easier to debug with logging.<commit_after># Django settings for c10kdemo project.
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = True
INSTALLED_APPS = (
'c10ktools',
'gameoflife',
'django.contrib.staticfiles',
)
LOGGING = {
'disable_existing_loggers': False,
'version': 1,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
},
},
}
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
ROOT_URLCONF = 'c10kdemo.urls'
SECRET_KEY = os.environ.get('SECRET_KEY', 'whatever')
STATIC_URL = '/static/'
TIME_ZONE = 'Europe/Paris'
WSGI_APPLICATION = 'c10kdemo.wsgi.application'
del os
|
b973c6abe4d325b08278822f85f72ebc1761a825 | changes/constants.py | changes/constants.py | from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
errored = 4
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.errored: 'Errored',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
| from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
| Remove errored state (lets rely on a single failure state) | Remove errored state (lets rely on a single failure state)
| Python | apache-2.0 | bowlofstew/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes | from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
errored = 4
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.errored: 'Errored',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
Remove errored state (lets rely on a single failure state) | from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
| <commit_before>from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
errored = 4
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.errored: 'Errored',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
<commit_msg>Remove errored state (lets rely on a single failure state)<commit_after> | from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
| from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
errored = 4
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.errored: 'Errored',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
Remove errored state (lets rely on a single failure state)from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
| <commit_before>from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
errored = 4
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.errored: 'Errored',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
<commit_msg>Remove errored state (lets rely on a single failure state)<commit_after>from enum import Enum
class Status(Enum):
unknown = 0
queued = 1
in_progress = 2
finished = 3
collecting_results = 4
def __str__(self):
return STATUS_LABELS[self]
class Result(Enum):
unknown = 0
passed = 1
failed = 2
skipped = 3
aborted = 5
timedout = 6
def __str__(self):
return RESULT_LABELS[self]
class Provider(Enum):
unknown = 0
koality = 'koality'
class Cause(Enum):
unknown = 0
manual = 1
push = 2
retry = 3
def __str__(self):
return CAUSE_LABELS[self]
STATUS_LABELS = {
Status.unknown: 'Unknown',
Status.queued: 'Queued',
Status.in_progress: 'In progress',
Status.finished: 'Finished'
}
RESULT_LABELS = {
Result.unknown: 'Unknown',
Result.passed: 'Passed',
Result.failed: 'Failed',
Result.skipped: 'Skipped',
Result.aborted: 'Aborted',
Result.timedout: 'Timed out'
}
CAUSE_LABELS = {
Cause.unknown: 'Unknown',
Cause.manual: 'Manual',
Cause.push: 'Code Push',
Cause.retry: 'Retry',
}
|
b16e9e2f3a349b53505a3f60409b65e139c62356 | alg_prim_minimum_spanning_tree.py | alg_prim_minimum_spanning_tree.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim(w_graph_d):
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
key_d = {v: np.inf for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
# Pick an arbitrary vertex as start.
s = w_graph_d.keys()[0]
visited_d[s] = True
key_d[s] = 0
min_pq.insert([key_d[s], s])
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree')
pass
if __name__ == '__main__':
main()
| Write init setting and pick a start | Write init setting and pick a start
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
Write init setting and pick a start | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim(w_graph_d):
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
key_d = {v: np.inf for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
# Pick an arbitrary vertex as start.
s = w_graph_d.keys()[0]
visited_d[s] = True
key_d[s] = 0
min_pq.insert([key_d[s], s])
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree')
pass
if __name__ == '__main__':
main()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
<commit_msg>Write init setting and pick a start<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim(w_graph_d):
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
key_d = {v: np.inf for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
# Pick an arbitrary vertex as start.
s = w_graph_d.keys()[0]
visited_d[s] = True
key_d[s] = 0
min_pq.insert([key_d[s], s])
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree')
pass
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
Write init setting and pick a startfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim(w_graph_d):
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
key_d = {v: np.inf for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
# Pick an arbitrary vertex as start.
s = w_graph_d.keys()[0]
visited_d[s] = True
key_d[s] = 0
min_pq.insert([key_d[s], s])
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree')
pass
if __name__ == '__main__':
main()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim():
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
start_vertex = 'a'
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree from {}:'.format(start_vertex))
pass
if __name__ == '__main__':
main()
<commit_msg>Write init setting and pick a start<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from ds_min_priority_queue_tuple import MinPriorityQueue
def prim(w_graph_d):
"""Prim's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): (|V|+|E|)log(|V|).
"""
min_pq = MinPriorityQueue()
key_d = {v: np.inf for v in w_graph_d.keys()}
previous_d = {v: None for v in w_graph_d.keys()}
visited_d = {v: False for v in w_graph_d.keys()}
# Pick an arbitrary vertex as start.
s = w_graph_d.keys()[0]
visited_d[s] = True
key_d[s] = 0
min_pq.insert([key_d[s], s])
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Prim minimum spanning tree')
pass
if __name__ == '__main__':
main()
|
31df2bef09c151479b53ed514c55a600a3862b46 | storage/elasticsearch_storage.py | storage/elasticsearch_storage.py | from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
| import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| Add mocks for es storage | Add mocks for es storage
| Python | mpl-2.0 | mitre/multiscanner,MITRECND/multiscanner,MITRECND/multiscanner,awest1339/multiscanner,awest1339/multiscanner,mitre/multiscanner,mitre/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner | from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
Add mocks for es storage | import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| <commit_before>from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
<commit_msg>Add mocks for es storage<commit_after> | import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
Add mocks for es storageimport json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| <commit_before>from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
<commit_msg>Add mocks for es storage<commit_after>import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
|
294f5331a2a6d1f4cd55b87df4409672c6b2c652 | storage/elasticsearch_storage.py | storage/elasticsearch_storage.py | from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
| import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| Add mocks for es storage | Add mocks for es storage
| Python | mpl-2.0 | jmlong1027/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,mitre/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,awest1339/multiscanner,mitre/multiscanner,MITRECND/multiscanner,awest1339/multiscanner | from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
Add mocks for es storage | import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| <commit_before>from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
<commit_msg>Add mocks for es storage<commit_after> | import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
Add mocks for es storageimport json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
| <commit_before>from storage import Storage
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
return 'Report ID'
def get_report(self, report_id):
return {1: {'report': 'data'}}
def delete(self, report_id):
return {'Message': 'deleted'}
<commit_msg>Add mocks for es storage<commit_after>import json
from storage import Storage
TASKS = [
{'task_id': 1, 'task_status': 'Complete', 'report_id': 1},
{'task_id': 2, 'task_status': 'Pending', 'report_id': None},
]
REPORTS = [
{'report_id': 1, 'report': {"/tmp/example.log": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}},
{'report_id': 2, 'report': {"/opt/grep_in_mem.py": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}},
]
class ElasticSearchStorage(Storage):
def __init__(self, config_dict):
self.db = config_dict['database']
self.host = config_dict['host']
self.port = config_dict['port']
self.username = config_dict['username']
self.password = config_dict['password']
self.index = config_dict['index']
self.doc_type = config_dict['doc_type']
def store(self, report):
report_id = REPORTS[-1]['report_id'] + 1
REPORTS.append({'report_id': report_id, 'report': report})
return report_id
def get_report(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return {}
return json.dumps(report[0])
def delete(self, report_id):
report = [report for report in REPORTS if report['report_id'] == report_id]
if len(report) == 0:
return False
abort(HTTP_NOT_FOUND)
REPORTS.remove(report[0])
return True
|
0207b0ea61050d8728e084277b14015bd92a8beb | tests/integration/test_kinesis.py | tests/integration/test_kinesis.py | # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import itertools
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('kinesis')
self.endpoint = self.service.get_endpoint('us-east-1')
def test_list_streams(self):
operation = self.service.get_operation('ListStreams')
http, parsed = operation.call(self.endpoint)
self.assertEqual(http.status_code, 200)
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
| # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('kinesis', 'us-east-1')
def test_list_streams(self):
parsed = self.client.list_streams()
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
| Switch kinesis integ tests over to client interface | Switch kinesis integ tests over to client interface
| Python | apache-2.0 | pplu/botocore,boto/botocore | # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import itertools
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('kinesis')
self.endpoint = self.service.get_endpoint('us-east-1')
def test_list_streams(self):
operation = self.service.get_operation('ListStreams')
http, parsed = operation.call(self.endpoint)
self.assertEqual(http.status_code, 200)
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
Switch kinesis integ tests over to client interface | # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('kinesis', 'us-east-1')
def test_list_streams(self):
parsed = self.client.list_streams()
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
| <commit_before># Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import itertools
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
    """Live integration test for Kinesis ``list_streams`` (hits AWS)."""

    def setUp(self):
        # create_client() is the supported botocore interface; the older
        # get_service()/get_endpoint() pair is deprecated and was removed
        # from later botocore releases.
        self.session = botocore.session.get_session()
        self.client = self.session.create_client('kinesis', 'us-east-1')

    def test_list_streams(self):
        # Client methods raise an exception on error responses, so the
        # explicit HTTP status-code assertion is no longer needed.
        parsed = self.client.list_streams()
        self.assertIn('StreamNames', parsed)

if __name__ == '__main__':
    unittest.main()
<commit_msg>Switch kinesis integ tests over to client interface<commit_after> | # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('kinesis', 'us-east-1')
def test_list_streams(self):
parsed = self.client.list_streams()
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
| # Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import itertools
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('kinesis')
self.endpoint = self.service.get_endpoint('us-east-1')
def test_list_streams(self):
operation = self.service.get_operation('ListStreams')
http, parsed = operation.call(self.endpoint)
self.assertEqual(http.status_code, 200)
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
Switch kinesis integ tests over to client interface# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('kinesis', 'us-east-1')
def test_list_streams(self):
parsed = self.client.list_streams()
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
| <commit_before># Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import itertools
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.service = self.session.get_service('kinesis')
self.endpoint = self.service.get_endpoint('us-east-1')
def test_list_streams(self):
operation = self.service.get_operation('ListStreams')
http, parsed = operation.call(self.endpoint)
self.assertEqual(http.status_code, 200)
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
<commit_msg>Switch kinesis integ tests over to client interface<commit_after># Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
class TestKinesisListStreams(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('kinesis', 'us-east-1')
def test_list_streams(self):
parsed = self.client.list_streams()
self.assertIn('StreamNames', parsed)
if __name__ == '__main__':
unittest.main()
|
fba94685ed3934196c4c36557578849aa2c7aeb0 | app.py | app.py | # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((id, User(id)) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[id].connect(users[adjacent_id])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
| # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((str(id), User(str(id))) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[str(id)].connect(users[str(adjacent_id)])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
| Convert all the ids to strings | Convert all the ids to strings
| Python | mit | nickfrostatx/infection,nickfrostatx/infection | # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((id, User(id)) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[id].connect(users[adjacent_id])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
Convert all the ids to strings | # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((str(id), User(str(id))) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[str(id)].connect(users[str(adjacent_id)])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
| <commit_before># -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((id, User(id)) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[id].connect(users[adjacent_id])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
<commit_msg>Convert all the ids to strings<commit_after> | # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((str(id), User(str(id))) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[str(id)].connect(users[str(adjacent_id)])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
| # -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((id, User(id)) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[id].connect(users[adjacent_id])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
Convert all the ids to strings# -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((str(id), User(str(id))) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[str(id)].connect(users[str(adjacent_id)])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
| <commit_before># -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((id, User(id)) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[id].connect(users[adjacent_id])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
<commit_msg>Convert all the ids to strings<commit_after># -*- coding: utf-8 -*-
"""A Flask app to visualize the infection algorithm."""
from flask import Flask, request, abort, jsonify
from werkzeug.exceptions import BadRequest
from infection import User, total_infection, limited_infection
app = Flask(__name__)
def load_user_graph():
"""Get the JSON-encoded user graph from the request body."""
json_users = request.get_json()
if json_users is None:
raise BadRequest('You need to supply a JSON user graph.')
try:
users = dict((str(id), User(str(id))) for id in json_users)
for id in json_users:
for adjacent_id in json_users[id]:
users[str(id)].connect(users[str(adjacent_id)])
except KeyError as e:
raise BadRequest('Unknown connection in graph: {0}.'.format(e.args[0]))
except TypeError:
raise BadRequest('Users must be a dictionary of lists.')
return users
@app.route('/infect', methods=['POST'])
def infect():
"""Run the specified infection algorithm on a given user graph."""
users = load_user_graph()
if request.args.get('type') == 'total':
try:
user = users[request.args['user']]
except KeyError:
raise BadRequest('Expected a valid user in param user.')
infected = total_infection(user)
return jsonify({'users': [user.id for user in infected]})
elif request.args.get('type') == 'limited':
return jsonify({'users': []})
raise BadRequest('Expected total or limited from query param type.')
|
b32602f4af337ce9952288fd7080d7f189440f0d | sweettooth/review/urls.py | sweettooth/review/urls.py |
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url('^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url('^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url('^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url(r'^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url(r'^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url(r'^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
| Use raw strings for regexp URLs. | Use raw strings for regexp URLs.
| Python | agpl-3.0 | GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,magcius/sweettooth,GNOME/extensions-web |
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url('^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url('^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url('^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
Use raw strings for regexp URLs. |
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url(r'^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url(r'^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url(r'^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
| <commit_before>
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url('^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url('^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url('^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
<commit_msg>Use raw strings for regexp URLs.<commit_after> |
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url(r'^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url(r'^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url(r'^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url('^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url('^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url('^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
Use raw strings for regexp URLs.
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url(r'^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url(r'^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url(r'^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
| <commit_before>
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url('^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url('^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url('^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
<commit_msg>Use raw strings for regexp URLs.<commit_after>
from django.conf.urls.defaults import patterns, url
from django.views.generic import ListView
from review import views
from extensions.models import ExtensionVersion, STATUS_LOCKED
urlpatterns = patterns('',
url(r'^$', ListView.as_view(queryset=ExtensionVersion.objects.filter(status=STATUS_LOCKED),
context_object_name="versions",
template_name="review/list.html"), name='review-list'),
url(r'^ajax/v/(?P<pk>\d+)', views.AjaxGetFilesView.as_view(), name='review-ajax-files'),
url(r'^submit/(?P<pk>\d+)', views.SubmitReviewView.as_view(), name='review-submit'),
url(r'^(?P<pk>\d+)', views.ReviewVersionView.as_view(), name='review-version'),
)
|
ffe584928616607be9685e1df4437a9715ce68be | bot.py | bot.py | #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
    config_file = u'config.ini'
    config = ConfigParser()
    config.read([config_file])
    # Always prompt interactively for the HipChat password; getpass
    # suppresses terminal echo and the value is handed straight to Bot.
    print u"Please input your Hipchat password:"
    password = getpass.getpass()
    bot = Bot(config_file, config, password)
    bot.start()
| #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
import os
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
password = os.getenv(u'HCBOT_PASSWORD', u'').strip()
# get password if it's not passed through the environment variable
if password:
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
| Enable passing password through env variable | Enable passing password through env variable
| Python | mit | LipuFei/team-hipchat-bot,LipuFei/team-hipchat-bot | #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
# get password
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
Enable passing password through env variable | #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
import os
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
password = os.getenv(u'HCBOT_PASSWORD', u'').strip()
# get password if it's not passed through the environment variable
if password:
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
| <commit_before>#!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
# get password
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
<commit_msg>Enable passing password through env variable<commit_after> | #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
import os
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
password = os.getenv(u'HCBOT_PASSWORD', u'').strip()
# get password if it's not passed through the environment variable
if password:
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
| #!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
# get password
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
Enable passing password through env variable#!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
import os
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
password = os.getenv(u'HCBOT_PASSWORD', u'').strip()
# get password if it's not passed through the environment variable
if password:
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
| <commit_before>#!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
# get password
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
<commit_msg>Enable passing password through env variable<commit_after>#!/usr/bin/env python
from ConfigParser import ConfigParser
import logging
import getpass
import os
from bot.bot import Bot
logging.basicConfig(level=logging.DEBUG,
format=u'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
if __name__ == '__main__':
config_file = u'config.ini'
config = ConfigParser()
config.read([config_file])
password = os.getenv(u'HCBOT_PASSWORD', u'').strip()
# get password if it's not passed through the environment variable
if password:
print u"Please input your Hipchat password:"
password = getpass.getpass()
bot = Bot(config_file, config, password)
bot.start()
|
5c9b98319b3537ef6287bc28353cd72748f9e1a8 | profile_collection/startup/99-bluesky.py | profile_collection/startup/99-bluesky.py | # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em_ch1, em_ch2, em_ch3, em_ch4]
| # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em]
gs.TABLE_COLS.append('em_chan21')
gs.PLOT_Y = 'em_ch1'
gs.TEMP_CONTROLLER = cs700
gs.TH_MOTOR = th
gs.TTH_MOTOR = tth
import time as ttime
# We probably already have these imports, but we use them below
# so I'm importing here to be sure.
from databroker import DataBroker as db, get_events
def verify_files_accessible(name, doc):
    """RunEngine 'stop' callback: confirm the finished run reached the
    databroker and that all of its event data can be loaded from disk.

    This is a brute-force approach: we retrieve all the data.
    """
    ttime.sleep(0.1) # Wait briefly so documents have time to be saved.
    if name != 'stop':
        return
    print("  Verifying that run was saved to broker...")
    try:
        header = db[doc['run_start']]
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
        return
    else:
        # '\x1b[1A' moves the cursor up one line to overwrite the
        # "Verifying..." message with a check mark.
        print('\x1b[1A\u2713')
    print("  Verifying that all data is accessible on the disk...")
    try:
        # fill=True forces external (on-disk) data to actually be loaded.
        list(get_events(header, fill=True))
    except Exception as e:
        print("  Verification Failed! Error: {0}".format(e))
    else:
        print('\x1b[1A\u2713')
gs.RE.subscribe('stop', verify_files_accessible)
# Alternatively,
# gs.RE(my_scan, verify_files_accessible)
# or
# ct(verify_files_accessible)
| Add multiple settings for bluesky | Add multiple settings for bluesky
- Define a data validator to run at the end of a scan.
- Set up default detectors and plot and table settles for SPEC API.
| Python | bsd-2-clause | NSLS-II-XPD/ipython_ophyd,pavoljuhas/ipython_ophyd,NSLS-II-XPD/ipython_ophyd,pavoljuhas/ipython_ophyd | # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em_ch1, em_ch2, em_ch3, em_ch4]
Add multiple settings for bluesky
- Define a data validator to run at the end of a scan.
- Set up default detectors and plot and table settles for SPEC API. | # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em]
gs.TABLE_COLS.append('em_chan21')
gs.PLOT_Y = 'em_ch1'
gs.TEMP_CONTROLLER = cs700
gs.TH_MOTOR = th
gs.TTH_MOTOR = tth
import time as ttime
# We probably already have these imports, but we use them below
# so I'm importing here to be sure.
from databroker import DataBroker as db, get_events
def verify_files_accessible(name, doc):
"This is a brute-force approach. We retrieve all the data."
ttime.sleep(0.1) # Wati for data to be saved.
if name != 'stop':
return
print(" Verifying that run was saved to broker...")
try:
header = db[doc['run_start']]
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
return
else:
print('\x1b[1A\u2713')
print(" Verifying that all data is accessible on the disk...")
try:
list(get_events(header, fill=True))
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
else:
print('\x1b[1A\u2713')
gs.RE.subscribe('stop', verify_files_accessible)
# Alternatively,
# gs.RE(my_scan, verify_files_accessible)
# or
# ct(verify_files_accessible)
| <commit_before># Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em_ch1, em_ch2, em_ch3, em_ch4]
<commit_msg>Add multiple settings for bluesky
- Define a data validator to run at the end of a scan.
- Set up default detectors and plot and table settles for SPEC API.<commit_after> | # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em]
gs.TABLE_COLS.append('em_chan21')
gs.PLOT_Y = 'em_ch1'
gs.TEMP_CONTROLLER = cs700
gs.TH_MOTOR = th
gs.TTH_MOTOR = tth
import time as ttime
# We probably already have these imports, but we use them below
# so I'm importing here to be sure.
from databroker import DataBroker as db, get_events
def verify_files_accessible(name, doc):
"This is a brute-force approach. We retrieve all the data."
ttime.sleep(0.1) # Wati for data to be saved.
if name != 'stop':
return
print(" Verifying that run was saved to broker...")
try:
header = db[doc['run_start']]
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
return
else:
print('\x1b[1A\u2713')
print(" Verifying that all data is accessible on the disk...")
try:
list(get_events(header, fill=True))
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
else:
print('\x1b[1A\u2713')
gs.RE.subscribe('stop', verify_files_accessible)
# Alternatively,
# gs.RE(my_scan, verify_files_accessible)
# or
# ct(verify_files_accessible)
| # Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em_ch1, em_ch2, em_ch3, em_ch4]
Add multiple settings for bluesky
- Define a data validator to run at the end of a scan.
- Set up default detectors and plot and table settles for SPEC API.# Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em]
gs.TABLE_COLS.append('em_chan21')
gs.PLOT_Y = 'em_ch1'
gs.TEMP_CONTROLLER = cs700
gs.TH_MOTOR = th
gs.TTH_MOTOR = tth
import time as ttime
# We probably already have these imports, but we use them below
# so I'm importing here to be sure.
from databroker import DataBroker as db, get_events
def verify_files_accessible(name, doc):
"This is a brute-force approach. We retrieve all the data."
ttime.sleep(0.1) # Wati for data to be saved.
if name != 'stop':
return
print(" Verifying that run was saved to broker...")
try:
header = db[doc['run_start']]
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
return
else:
print('\x1b[1A\u2713')
print(" Verifying that all data is accessible on the disk...")
try:
list(get_events(header, fill=True))
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
else:
print('\x1b[1A\u2713')
gs.RE.subscribe('stop', verify_files_accessible)
# Alternatively,
# gs.RE(my_scan, verify_files_accessible)
# or
# ct(verify_files_accessible)
| <commit_before># Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em_ch1, em_ch2, em_ch3, em_ch4]
<commit_msg>Add multiple settings for bluesky
- Define a data validator to run at the end of a scan.
- Set up default detectors and plot and table settles for SPEC API.<commit_after># Configure bluesky default detectors with this:
# These are the new "default detectors"
gs.DETS = [em]
gs.TABLE_COLS.append('em_chan21')
gs.PLOT_Y = 'em_ch1'
gs.TEMP_CONTROLLER = cs700
gs.TH_MOTOR = th
gs.TTH_MOTOR = tth
import time as ttime
# We probably already have these imports, but we use them below
# so I'm importing here to be sure.
from databroker import DataBroker as db, get_events
def verify_files_accessible(name, doc):
"This is a brute-force approach. We retrieve all the data."
ttime.sleep(0.1) # Wati for data to be saved.
if name != 'stop':
return
print(" Verifying that run was saved to broker...")
try:
header = db[doc['run_start']]
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
return
else:
print('\x1b[1A\u2713')
print(" Verifying that all data is accessible on the disk...")
try:
list(get_events(header, fill=True))
except Exception as e:
print(" Verification Failed! Error: {0}".format(e))
else:
print('\x1b[1A\u2713')
gs.RE.subscribe('stop', verify_files_accessible)
# Alternatively,
# gs.RE(my_scan, verify_files_accessible)
# or
# ct(verify_files_accessible)
|
55d0fa9b834e6400d48293c80e557c27f5cc4181 | yowsup/structs/protocolentity.py | yowsup/structs/protocolentity.py | from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
    """Base class for objects that serialize to/from ProtocolTreeNode trees.

    Subclasses override toProtocolTreeNode and fromProtocolTreeNode to
    convert between themselves and the wire representation.
    """
    # Process-wide monotonic counter used to make generated ids unique.
    __ID_GEN = -1
    def __init__(self, tag):
        # tag: the node tag this entity serializes to.
        self.tag = tag
    def getTag(self):
        """Return this entity's node tag."""
        return self.tag
    def isType(self, typ):
        """Return True if this entity's tag equals ``typ``."""
        return self.tag == typ
    def _createProtocolTreeNode(self, attributes, children = None, data = None):
        """Build a ProtocolTreeNode rooted at this entity's tag."""
        return ProtocolTreeNode(self.getTag(), attributes, children, data)
    def _getCurrentTimestamp(self):
        """Current Unix time truncated to whole seconds."""
        return int(time.time())
    def _generateId(self):
        """Return a unique id of the form '<unix-timestamp>-<counter>'."""
        ProtocolEntity.__ID_GEN += 1
        return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
    def toProtocolTreeNode(self):
        """Serialize this entity; implemented by subclasses."""
        pass
    @staticmethod
    def fromProtocolTreeNode(self, protocolTreeNode):
        # NOTE(review): declared @staticmethod yet takes ``self`` — callers
        # presumably pass the node as the first positional argument; confirm.
        pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
self.assertEqual(entity.toProtocolTreeNode(), self.node)
| from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
    """Common behaviour for entities convertible to ProtocolTreeNode trees."""

    # Shared monotonic counter backing _generateId().
    __ID_GEN = -1

    def __init__(self, tag):
        self.tag = tag

    def getTag(self):
        """The node tag this entity maps to."""
        return self.tag

    def isType(self, typ):
        """Whether this entity's tag matches ``typ``."""
        return typ == self.tag

    def _createProtocolTreeNode(self, attributes, children = None, data = None):
        """Construct the ProtocolTreeNode representation of this entity."""
        node_tag = self.getTag()
        return ProtocolTreeNode(node_tag, attributes, children, data)

    def _getCurrentTimestamp(self):
        """Unix time in whole seconds."""
        return int(time.time())

    def _generateId(self):
        """Produce a unique '<timestamp>-<counter>' identifier."""
        ProtocolEntity.__ID_GEN += 1
        return "{0}-{1}".format(int(time.time()), ProtocolEntity.__ID_GEN)

    def toProtocolTreeNode(self):
        """Subclasses serialize themselves here."""
        pass

    @staticmethod
    def fromProtocolTreeNode(self, protocolTreeNode):
        """Subclasses reconstruct an entity from a node here."""
        pass
class ProtocolEntityTest(unittest.TestCase):
    """Shared round-trip harness; subclasses set ProtocolEntity and node."""
    def setUp(self):
        # The base class carries no fixture of its own.
        self.skipTest("override in child classes")
    def test_generation(self):
        """Parse the fixture node and check it serializes back unchanged."""
        entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
        try:
            self.assertEqual(entity.toProtocolTreeNode(), self.node)
        except Exception:
            # Dump both trees so a mismatch is diagnosable, then re-raise.
            # (Was a bare ``except:``, which also trapped KeyboardInterrupt
            # and SystemExit.)
            print(entity.toProtocolTreeNode())
            print("\nNOTEQ\n")
            print(self.node)
            raise
| Print protocoltreenode on assertion failure | Print protocoltreenode on assertion failure
| Python | mit | biji/yowsup,ongair/yowsup | from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
self.assertEqual(entity.toProtocolTreeNode(), self.node)
Print protocoltreenode on assertion failure | from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
try:
self.assertEqual(entity.toProtocolTreeNode(), self.node)
except:
print(entity.toProtocolTreeNode())
print("\nNOTEQ\n")
print(self.node)
raise
| <commit_before>from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
self.assertEqual(entity.toProtocolTreeNode(), self.node)
<commit_msg>Print protocoltreenode on assertion failure<commit_after> | from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
try:
self.assertEqual(entity.toProtocolTreeNode(), self.node)
except:
print(entity.toProtocolTreeNode())
print("\nNOTEQ\n")
print(self.node)
raise
| from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
self.assertEqual(entity.toProtocolTreeNode(), self.node)
Print protocoltreenode on assertion failurefrom .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
try:
self.assertEqual(entity.toProtocolTreeNode(), self.node)
except:
print(entity.toProtocolTreeNode())
print("\nNOTEQ\n")
print(self.node)
raise
| <commit_before>from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
self.assertEqual(entity.toProtocolTreeNode(), self.node)
<commit_msg>Print protocoltreenode on assertion failure<commit_after>from .protocoltreenode import ProtocolTreeNode
import unittest, time
class ProtocolEntity(object):
__ID_GEN = -1
def __init__(self, tag):
self.tag = tag
def getTag(self):
return self.tag
def isType(self, typ):
return self.tag == typ
def _createProtocolTreeNode(self, attributes, children = None, data = None):
return ProtocolTreeNode(self.getTag(), attributes, children, data)
def _getCurrentTimestamp(self):
return int(time.time())
def _generateId(self):
ProtocolEntity.__ID_GEN += 1
return str(int(time.time())) + "-" + str(ProtocolEntity.__ID_GEN)
def toProtocolTreeNode(self):
pass
@staticmethod
def fromProtocolTreeNode(self, protocolTreeNode):
pass
class ProtocolEntityTest(unittest.TestCase):
def setUp(self):
self.skipTest("override in child classes")
def test_generation(self):
entity = self.ProtocolEntity.fromProtocolTreeNode(self.node)
try:
self.assertEqual(entity.toProtocolTreeNode(), self.node)
except:
print(entity.toProtocolTreeNode())
print("\nNOTEQ\n")
print(self.node)
raise
|
6ec4307173f3eafa87fd063978914bf5816ecb0a | reports/utils.py | reports/utils.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
    """Default Plotly layout options shared by all report graphs.

    Returns a fresh dict on every call, so callers may mutate the result.
    """
    return {
        'font': {
            'color': 'rgba(0, 0, 0, 1)',
            # Bootstrap 4 font family, so graphs match the site typography.
            'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
                      'Roboto, "Helvetica Neue", Arial, sans-serif, '
                      '"Apple Color Emoji", "Segoe UI Emoji", '
                      '"Segoe UI Symbol"',
            'size': 14,
        },
        # Bottom/top plot margins in pixels.
        'margin': {'b': 40, 't': 40},
        # Muted (54% black) axis-title colour on both axes.
        'xaxis': {
            'titlefont': {
                'color': 'rgba(0, 0, 0, 0.54)'
            }
        },
        'yaxis': {
            'titlefont': {
                'color': 'rgba(0, 0, 0, 0.54)'
            }
        }
    }
def split_graph_output(output):
    """Split the output of a Plotly graph into HTML and JavaScript parts.

    :param output: combined markup produced by Plotly — an HTML fragment
        followed by a ``<script`` tag.
    :returns: ``(html, javascript)``; ``javascript`` keeps its ``<script``
        prefix so it can be embedded directly.
    """
    # str.partition splits on the *first* occurrence only, so output that
    # contains more than one '<script' tag no longer raises ValueError
    # (str.split + 2-tuple unpacking did).
    html, separator, script_body = output.partition('<script')
    return html, separator + script_body
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
    """Default layout options applied to every report graph."""
    # Bootstrap 4 system font stack, so graphs match the site typography.
    family = ('-apple-system, BlinkMacSystemFont, "Segoe UI", '
              'Roboto, "Helvetica Neue", Arial, sans-serif, '
              '"Apple Color Emoji", "Segoe UI Emoji", '
              '"Segoe UI Symbol"')
    # Muted (54% black) colour used for both axis titles.
    axis_title_color = 'rgba(0, 0, 0, 0.54)'
    layout = {
        'font': {'color': 'rgba(0, 0, 0, 1)', 'family': family, 'size': 14},
        # Top margin leaves room for two-line graph titles.
        'margin': {'b': 40, 't': 80},
    }
    for axis in ('xaxis', 'yaxis'):
        layout[axis] = {'titlefont': {'color': axis_title_color}}
    return layout
def split_graph_output(output):
    """Split the output of a Plotly graph into HTML and JavaScript parts.

    :param output: combined markup produced by Plotly — an HTML fragment
        followed by a ``<script`` tag.
    :returns: ``(html, javascript)``; ``javascript`` keeps its ``<script``
        prefix so it can be embedded directly.
    """
    # str.partition splits on the *first* occurrence only, so output that
    # contains more than one '<script' tag no longer raises ValueError
    # (str.split + 2-tuple unpacking did).
    html, separator, script_body = output.partition('<script')
    return html, separator + script_body
| Increase default top margin to account for two line graph titles. | Increase default top margin to account for two line graph titles.
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 40},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
Increase default top margin to account for two line graph titles. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 80},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 40},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
<commit_msg>Increase default top margin to account for two line graph titles.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 80},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 40},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
Increase default top margin to account for two line graph titles.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 80},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 40},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
<commit_msg>Increase default top margin to account for two line graph titles.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
def default_graph_layout_options():
"""Default layout options for all graphs.
"""
return {
'font': {
'color': 'rgba(0, 0, 0, 1)',
# Bootstrap 4 font family.
'family': '-apple-system, BlinkMacSystemFont, "Segoe UI", '
'Roboto, "Helvetica Neue", Arial, sans-serif, '
'"Apple Color Emoji", "Segoe UI Emoji", '
'"Segoe UI Symbol"',
'size': 14,
},
'margin': {'b': 40, 't': 80},
'xaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
},
'yaxis': {
'titlefont': {
'color': 'rgba(0, 0, 0, 0.54)'
}
}
}
def split_graph_output(output):
"""Split out of a Plotly graph in to html and javascript.
"""
html, javascript = output.split('<script')
javascript = '<script' + javascript
return html, javascript
|
24c5497b0c91ce032fb4cf99e79fffc5fa27cb84 | push/management/commands/startbatches.py | push/management/commands/startbatches.py | # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
| # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
    """Management command: send every push notification scheduled for *now*.

    Intended to be run periodically (e.g. once a minute by cron).
    """
    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
    def handle(self, *args, **kwargs):
        # Match at minute precision against the stored 'YYYY/MM/DD HH:MM'
        # string; skip notifications that were already sent.
        # NOTE(review): datetime.now() is naive local time — confirm it
        # matches the timezone used when execute_datetime was stored.
        now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
        notifications = NotificationModel.objects.filter(execute_datetime = now, is_sent = False)
        for notification in notifications:
            # Target only devices at/above the notification's minimum OS
            # version that belong to the notification's user.
            device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
                                                            username = notification.username)
            self.prepare_push_notification(notification, device_tokens)
    def prepare_push_notification(self, notification, device_tokens):
        """Collect the raw token strings and hand them to the push sender."""
        device_token_lists = []
        for item in device_tokens:
            device_token_lists.append(item.device_token)
        push_notification.execute(device_token_lists, notification)
| Update batch execute for conditions | Update batch execute for conditions
| Python | apache-2.0 | nnsnodnb/django-mbaas,nnsnodnb/django-mbaas,nnsnodnb/django-mbaas | # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
Update batch execute for conditions | # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now, is_sent = False)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
| <commit_before># coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
<commit_msg>Update batch execute for conditions<commit_after> | # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now, is_sent = False)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
| # coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
Update batch execute for conditions# coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now, is_sent = False)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
| <commit_before># coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
<commit_msg>Update batch execute for conditions<commit_after># coding=utf-8
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from push.models import DeviceTokenModel, NotificationModel
from datetime import datetime
import push_notification
class Command(BaseCommand):
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
def handle(self, *args, **kwargs):
now = '{0:%Y/%m/%d %H:%M}'.format(datetime.now())
notifications = NotificationModel.objects.filter(execute_datetime = now, is_sent = False)
for notification in notifications:
device_tokens = DeviceTokenModel.objects.filter(os_version__gte = notification.os_version,
username = notification.username)
self.prepare_push_notification(notification, device_tokens)
def prepare_push_notification(self, notification, device_tokens):
device_token_lists = []
for item in device_tokens:
device_token_lists.append(item.device_token)
push_notification.execute(device_token_lists, notification)
|
056cb6d5dff67fe029a080abeaba36faee5cff60 | lib/test_util.py | lib/test_util.py | from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
client = HTTPClient()
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
def fetch_documents_from_url(url):
'''
Retrieve newebe documents from a givent url
'''
response = client.fetch(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(path):
fetch_documents_from_url(ROOT_URL + path)
| from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write POST, PUT and delete request faster.
'''
def get(self, url):
return HTTPClient.fetch(self, url)
def post(self, url, body):
return HTTPClient.fetch(self, url, method="POST", body=body)
def put(self, url, body):
return HTTPClient.fetch(self, url, method="PUT", body=body)
def delete(self, url):
return HTTPClient.fetch(self, url, method="DELETE")
def fetch_documents_from_url(self, url):
'''
Retrieve newebe documents from a givent url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(self, path):
self.fetch_documents_from_url(ROOT_URL + path)
| Make newebe HTTP client for easier requesting | Make newebe HTTP client for easier requesting
| Python | agpl-3.0 | gelnior/newebe,gelnior/newebe,gelnior/newebe,gelnior/newebe | from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
client = HTTPClient()
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
def fetch_documents_from_url(url):
'''
Retrieve newebe documents from a givent url
'''
response = client.fetch(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(path):
fetch_documents_from_url(ROOT_URL + path)
Make newebe HTTP client for easier requesting | from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write POST, PUT and delete request faster.
'''
def get(self, url):
return HTTPClient.fetch(self, url)
def post(self, url, body):
return HTTPClient.fetch(self, url, method="POST", body=body)
def put(self, url, body):
return HTTPClient.fetch(self, url, method="PUT", body=body)
def delete(self, url):
return HTTPClient.fetch(self, url, method="DELETE")
def fetch_documents_from_url(self, url):
'''
Retrieve newebe documents from a givent url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(self, path):
self.fetch_documents_from_url(ROOT_URL + path)
| <commit_before>from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
client = HTTPClient()
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
def fetch_documents_from_url(url):
'''
Retrieve newebe documents from a givent url
'''
response = client.fetch(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(path):
fetch_documents_from_url(ROOT_URL + path)
<commit_msg>Make newebe HTTP client for easier requesting<commit_after> | from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write POST, PUT and delete request faster.
'''
def get(self, url):
return HTTPClient.fetch(self, url)
def post(self, url, body):
return HTTPClient.fetch(self, url, method="POST", body=body)
def put(self, url, body):
return HTTPClient.fetch(self, url, method="PUT", body=body)
def delete(self, url):
return HTTPClient.fetch(self, url, method="DELETE")
def fetch_documents_from_url(self, url):
'''
Retrieve newebe documents from a givent url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(self, path):
self.fetch_documents_from_url(ROOT_URL + path)
| from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
client = HTTPClient()
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
def fetch_documents_from_url(url):
'''
Retrieve newebe documents from a givent url
'''
response = client.fetch(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(path):
fetch_documents_from_url(ROOT_URL + path)
Make newebe HTTP client for easier requestingfrom lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write POST, PUT and delete request faster.
'''
def get(self, url):
return HTTPClient.fetch(self, url)
def post(self, url, body):
return HTTPClient.fetch(self, url, method="POST", body=body)
def put(self, url, body):
return HTTPClient.fetch(self, url, method="PUT", body=body)
def delete(self, url):
return HTTPClient.fetch(self, url, method="DELETE")
def fetch_documents_from_url(self, url):
'''
Retrieve newebe documents from a givent url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(self, path):
self.fetch_documents_from_url(ROOT_URL + path)
| <commit_before>from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
client = HTTPClient()
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
def fetch_documents_from_url(url):
'''
Retrieve newebe documents from a givent url
'''
response = client.fetch(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(path):
fetch_documents_from_url(ROOT_URL + path)
<commit_msg>Make newebe HTTP client for easier requesting<commit_after>from lettuce import world
from tornado.escape import json_decode
from tornado.httpclient import HTTPClient
from newebe.settings import TORNADO_PORT
ROOT_URL = "http://localhost:%d/" % TORNADO_PORT
class NewebeClient(HTTPClient):
'''
Tornado client wrapper to write POST, PUT and delete request faster.
'''
def get(self, url):
return HTTPClient.fetch(self, url)
def post(self, url, body):
return HTTPClient.fetch(self, url, method="POST", body=body)
def put(self, url, body):
return HTTPClient.fetch(self, url, method="PUT", body=body)
def delete(self, url):
return HTTPClient.fetch(self, url, method="DELETE")
def fetch_documents_from_url(self, url):
'''
Retrieve newebe documents from a givent url
'''
response = self.get(url)
assert response.code == 200
assert response.headers["Content-Type"] == "application/json"
world.data = json_decode(response.body)
return world.data["rows"]
def fetch_documents(self, path):
self.fetch_documents_from_url(ROOT_URL + path)
|
be0ca3d4a1759fd68f0360fb3b6fe06cdc4cf7ea | test/test_blacklist_integrity.py | test/test_blacklist_integrity.py | #!/usr/bin/env python3
from glob import glob
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
| #!/usr/bin/env python3
from glob import glob
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
| Check blacklist against duplicate entries as well | Check blacklist against duplicate entries as well
Additionally, refactor into a def test_* to run like the other unit tests.
| Python | apache-2.0 | Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector | #!/usr/bin/env python3
from glob import glob
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
Check blacklist against duplicate entries as well
Additionally, refactor into a def test_* to run like the other unit tests. | #!/usr/bin/env python3
from glob import glob
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
| <commit_before>#!/usr/bin/env python3
from glob import glob
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
<commit_msg>Check blacklist against duplicate entries as well
Additionally, refactor into a def test_* to run like the other unit tests.<commit_after> | #!/usr/bin/env python3
from glob import glob
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
| #!/usr/bin/env python3
from glob import glob
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
Check blacklist against duplicate entries as well
Additionally, refactor into a def test_* to run like the other unit tests.#!/usr/bin/env python3
from glob import glob
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
| <commit_before>#!/usr/bin/env python3
from glob import glob
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
<commit_msg>Check blacklist against duplicate entries as well
Additionally, refactor into a def test_* to run like the other unit tests.<commit_after>#!/usr/bin/env python3
from glob import glob
def test_blacklist_integrity():
for bl_file in glob('bad_*.txt') + glob('blacklisted_*.txt'):
with open(bl_file, 'r') as lines:
seen = dict()
for lineno, line in enumerate(lines, 1):
if line.endswith('\r\n'):
raise(ValueError('{0}:{1}:DOS line ending'.format(bl_file, lineno)))
if not line.endswith('\n'):
raise(ValueError('{0}:{1}:No newline'.format(bl_file, lineno)))
if line == '\n':
raise(ValueError('{0}:{1}:Empty line'.format(bl_file, lineno)))
if line in seen:
raise(ValueError('{0}:{1}:Duplicate entry {2} (also on line {3})'.format(
bl_file, lineno, line.rstrip('\n'), seen[line])))
seen[line] = lineno
|
c5e47e61a6b51da99126a9faa4064a621acf017c | tests/handhistory/speed_tests.py | tests/handhistory/speed_tests.py | from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
| from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
| Make handhistory speed test work from root dir | Make handhistory speed test work from root dir
| Python | mit | pokerregion/poker | from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
Make handhistory speed test work from root dir | from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
| <commit_before>from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
<commit_msg>Make handhistory speed test work from root dir<commit_after> | from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
| from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
Make handhistory speed test work from root dirfrom timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
| <commit_before>from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from handhistory import PokerStarsHandHistory; "
f"from stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
<commit_msg>Make handhistory speed test work from root dir<commit_after>from timeit import timeit, repeat
results, single_results = [], []
for handnr in range(1, 5):
single_results.append(
timeit(f'PokerStarsHandHistory(HAND{handnr})', number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
results.extend(repeat(f'PokerStarsHandHistory(HAND{handnr})', repeat=3, number=100000,
setup="from poker.room.pokerstars import PokerStarsHandHistory; "
f"from tests.handhistory.stars_hands import HAND{handnr}")
)
print("Single results average:", sum(single_results) / len(single_results))
print("Repeated results average:", sum(results) / len(results))
|
221bb27796036b348c5cf0fd06a0d57984b3591c | tests/integ/test_basic.py | tests/integ/test_basic.py | """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
| """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
| Rename integration test model names for debugging in console | Rename integration test model names for debugging in console
| Python | mit | numberoverzero/bloop,numberoverzero/bloop | """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
Rename integration test model names for debugging in console | """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
| <commit_before>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
<commit_msg>Rename integration test model names for debugging in console<commit_after> | """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
    """Round-trip create, read, update, and delete through the engine."""
    engine.bind(User)
    user = User(email="user@domain.com", username="user", profile="first")
    engine.save(user)
    # A second instance with only the keys loads the saved profile.
    same_user = User(email=user.email, username=user.username)
    engine.load(same_user)
    assert user.profile == same_user.profile
    # Update through the second instance; a consistent read sees the change.
    same_user.profile = "second"
    engine.save(same_user)
    engine.load(user, consistent=True)
    assert user.profile == same_user.profile
    # After delete, loading raises and reports exactly the missing object.
    engine.delete(user)
    with pytest.raises(MissingObjects) as excinfo:
        engine.load(same_user, consistent=True)
    assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
| """Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
Rename integration test model names for debugging in console"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
| <commit_before>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class Model(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(Model)
def test_stream_creation(engine):
class Model(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(Model)
<commit_msg>Rename integration test model names for debugging in console<commit_after>"""Basic scenarios, symmetric tests"""
import pytest
from bloop import (
BaseModel,
Column,
GlobalSecondaryIndex,
Integer,
MissingObjects,
)
from .models import User
def test_crud(engine):
engine.bind(User)
user = User(email="user@domain.com", username="user", profile="first")
engine.save(user)
same_user = User(email=user.email, username=user.username)
engine.load(same_user)
assert user.profile == same_user.profile
same_user.profile = "second"
engine.save(same_user)
engine.load(user, consistent=True)
assert user.profile == same_user.profile
engine.delete(user)
with pytest.raises(MissingObjects) as excinfo:
engine.load(same_user, consistent=True)
assert [same_user] == excinfo.value.objects
def test_projection_overlap(engine):
class ProjectionOverlap(BaseModel):
hash = Column(Integer, hash_key=True)
range = Column(Integer, range_key=True)
other = Column(Integer)
by_other = GlobalSecondaryIndex(projection=["other", "range"], hash_key="other")
# by_other's projected attributes overlap with the model and its own keys
engine.bind(ProjectionOverlap)
def test_stream_creation(engine):
class StreamCreation(BaseModel):
class Meta:
stream = {
"include": ["keys"]
}
hash = Column(Integer, hash_key=True)
engine.bind(StreamCreation)
|
c33b876c664178de92099b6553a6030789bdaaa4 | app/v2/templates/get_templates.py | app/v2/templates/get_templates.py | from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
    """Return every template for the authenticated service as JSON.

    Query-string arguments are validated against the
    ``get_all_template_request`` schema; ``validate`` raises on failure.
    Returns a 200 response whose body is a ``templates`` list of
    serialized templates.
    """
    validate(request.args.to_dict(), get_all_template_request)
    templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
    # Removed stray debug print(templates): it dumped template data to
    # stdout on every request and served no purpose in production.
    return jsonify(
        templates=[template.serialize() for template in templates]
    ), 200
| from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
| Remove get all template print | Remove get all template print
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
Remove get all template print | from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
| <commit_before>from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
<commit_msg>Remove get all template print<commit_after> | from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
    """Return all templates for the authenticated service as a 200 JSON response."""
    # Reject unexpected query-string arguments up front.
    validate(request.args.to_dict(), get_all_template_request)
    service_templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
    serialized = [template.serialize() for template in service_templates]
    return jsonify(templates=serialized), 200
| from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
Remove get all template printfrom flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
| <commit_before>from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
print(templates)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
<commit_msg>Remove get all template print<commit_after>from flask import jsonify, request
from jsonschema.exceptions import ValidationError
from app import api_user
from app.dao import templates_dao
from app.schema_validation import validate
from app.v2.templates import v2_templates_blueprint
from app.v2.templates.templates_schemas import get_all_template_request
@v2_templates_blueprint.route("/", methods=['GET'])
def get_templates():
validate(request.args.to_dict(), get_all_template_request)
templates = templates_dao.dao_get_all_templates_for_service(api_user.service_id)
return jsonify(
templates=[template.serialize() for template in templates]
), 200
|
f364b55a643c2768f80cb559eb0ec1988aa884c8 | tests/htmlgeneration_test.py | tests/htmlgeneration_test.py | from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
generator = HtmlGenerator()
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
assert_equal(expected_html, generator.for_document(document))
| from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
    """Document HTML is each paragraph's HTML concatenated into one fragment."""
    document = openxml.document([
        openxml.paragraph([
            openxml.run([
                openxml.text("Hello")
            ])
        ]),
        openxml.paragraph([
            openxml.run([
                openxml.text("there")
            ])
        ])
    ])
    # Each source paragraph becomes its own <p> element in the fragment.
    expected_html = html.fragment([
        html.element("p", [html.text("Hello")]),
        html.element("p", [html.text("there")])
    ])
    generator = HtmlGenerator()
    assert_equal(expected_html, generator.for_document(document))
@istest
def html_for_paragraph_uses_p_tag_if_there_is_no_style():
    """An unstyled paragraph renders as a plain <p> element."""
    paragraph = openxml.paragraph([
        openxml.run([
            openxml.text("Hello")
        ])
    ])
    expected_html = html.element("p", [html.text("Hello")])
    generator = HtmlGenerator()
    assert_equal(expected_html, generator.for_paragraph(paragraph))
| Add test just for paragraph HTML generation | Add test just for paragraph HTML generation
| Python | bsd-2-clause | mwilliamson/wordbridge | from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
generator = HtmlGenerator()
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
assert_equal(expected_html, generator.for_document(document))
Add test just for paragraph HTML generation | from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_document(document))
@istest
def html_for_paragraph_uses_p_tag_if_there_is_no_style():
paragraph = openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
])
expected_html = html.element("p", [html.text("Hello")])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_paragraph(paragraph))
| <commit_before>from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
generator = HtmlGenerator()
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
assert_equal(expected_html, generator.for_document(document))
<commit_msg>Add test just for paragraph HTML generation<commit_after> | from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_document(document))
@istest
def html_for_paragraph_uses_p_tag_if_there_is_no_style():
paragraph = openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
])
expected_html = html.element("p", [html.text("Hello")])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_paragraph(paragraph))
| from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
generator = HtmlGenerator()
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
assert_equal(expected_html, generator.for_document(document))
Add test just for paragraph HTML generationfrom nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_document(document))
@istest
def html_for_paragraph_uses_p_tag_if_there_is_no_style():
paragraph = openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
])
expected_html = html.element("p", [html.text("Hello")])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_paragraph(paragraph))
| <commit_before>from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
generator = HtmlGenerator()
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
assert_equal(expected_html, generator.for_document(document))
<commit_msg>Add test just for paragraph HTML generation<commit_after>from nose.tools import istest, assert_equal
from lxml import etree
from wordbridge import openxml
from wordbridge.htmlgeneration import HtmlGenerator
from wordbridge.html import HtmlBuilder
html = HtmlBuilder()
@istest
def generating_html_for_document_concats_html_for_paragraphs():
document = openxml.document([
openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
]),
openxml.paragraph([
openxml.run([
openxml.text("there")
])
])
])
expected_html = html.fragment([
html.element("p", [html.text("Hello")]),
html.element("p", [html.text("there")])
])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_document(document))
@istest
def html_for_paragraph_uses_p_tag_if_there_is_no_style():
paragraph = openxml.paragraph([
openxml.run([
openxml.text("Hello")
])
])
expected_html = html.element("p", [html.text("Hello")])
generator = HtmlGenerator()
assert_equal(expected_html, generator.for_paragraph(paragraph))
|
f1dd26bfb449f8bba69f93cae02ab904e0a9cba0 | tasks/hello_world.py | tasks/hello_world.py | import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
self.config = json.load(config_file)
self.message = self.config['message']
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
| import json
import pystache
class HelloWorld():
    """Task that renders a mustache template using settings from a JSON file."""
    def __init__(self):
        """Load models/hello_world.json and expose its keys as attributes."""
        with open('models/hello_world.json') as config_file:
            # Map JSON properties to this object
            self.__dict__.update(json.load(config_file))
    def process(self):
        """Render this object against the templates dir and write the output."""
        # pystache resolves template tags from this object's attributes.
        renderer = pystache.Renderer(search_dirs='templates')
        with open('bin/hello_world.txt', 'w') as output_file:
            output_file.write(renderer.render(self))
| Copy config settings to task object automatically | Copy config settings to task object automatically
| Python | mit | wpkita/automation-station,wpkita/automation-station,wpkita/automation-station | import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
self.config = json.load(config_file)
self.message = self.config['message']
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
Copy config settings to task object automatically | import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
# Map JSON properties to this object
self.__dict__.update(json.load(config_file))
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
| <commit_before>import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
self.config = json.load(config_file)
self.message = self.config['message']
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
<commit_msg>Copy config settings to task object automatically<commit_after> | import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
# Map JSON properties to this object
self.__dict__.update(json.load(config_file))
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
| import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
self.config = json.load(config_file)
self.message = self.config['message']
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
Copy config settings to task object automaticallyimport json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
# Map JSON properties to this object
self.__dict__.update(json.load(config_file))
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
| <commit_before>import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
self.config = json.load(config_file)
self.message = self.config['message']
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
<commit_msg>Copy config settings to task object automatically<commit_after>import json
import pystache
class HelloWorld():
def __init__(self):
with open('models/hello_world.json') as config_file:
# Map JSON properties to this object
self.__dict__.update(json.load(config_file))
def process(self):
renderer = pystache.Renderer(search_dirs='templates')
with open('bin/hello_world.txt', 'w') as output_file:
output_file.write(renderer.render(self))
|
3f5a6d6cbf959cccddd2cb944eb93cd8f963f4a4 | tools/cr/cr/actions/linux.py | tools/cr/cr/actions/linux.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, ['{CR_BINARY}', '{CR_RUN_ARGUMENTS}'] + arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
  """An implementation of cr.Installer for the linux platform.
  This does nothing, the linux runner works from the output directory, there
  is no need to install anywhere.
  """
  @property
  def enabled(self):
    # Active on linux only.
    return cr.LinuxPlatform.GetInstance().is_active
  def Uninstall(self, context, targets, arguments):
    # Nothing to uninstall; binaries run in place from the output directory.
    pass
  def Install(self, context, targets, arguments):
    # Nothing to install; binaries run in place from the output directory.
    pass
  def Reinstall(self, context, targets, arguments):
    # Nothing to reinstall; binaries run in place from the output directory.
    pass
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, '{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
| Fix the run command on Linux | cr: Fix the run command on Linux
TEST=cr run chrome
NOTRY=true
Review URL: https://codereview.chromium.org/105313004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@240638 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | PeterWangIntel/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,jaruba/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,littlstar/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,markYoun
gH/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,M4sse/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,M4sse/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,jaruba/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,dednal/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,jaruba/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,dednal/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,Chilledheart/chromium,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chro
mium.src,anirudhSK/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,ltilve/chromium,ChromiumWebApps/chromium,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,patrickm/chromium.src,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,littlstar/chromium.src,ltilve/chromium,anirudhSK/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,dednal/chromium.src,Chilledheart/chromium,Just-D/chromium-1,anirudhSK/chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,littlstar/chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,patrickm/chromium.src,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,Chilledheart/chromium,patrickm/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromiu
m.src,ondra-novak/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,ChromiumWebApps/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,dednal/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,hgl888/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, ['{CR_BINARY}', '{CR_RUN_ARGUMENTS}'] + arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
cr: Fix the run command on Linux
TEST=cr run chrome
NOTRY=true
Review URL: https://codereview.chromium.org/105313004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@240638 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, '{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, ['{CR_BINARY}', '{CR_RUN_ARGUMENTS}'] + arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
<commit_msg>cr: Fix the run command on Linux
TEST=cr run chrome
NOTRY=true
Review URL: https://codereview.chromium.org/105313004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@240638 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, '{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, ['{CR_BINARY}', '{CR_RUN_ARGUMENTS}'] + arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
cr: Fix the run command on Linux
TEST=cr run chrome
NOTRY=true
Review URL: https://codereview.chromium.org/105313004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@240638 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, '{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, ['{CR_BINARY}', '{CR_RUN_ARGUMENTS}'] + arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
<commit_msg>cr: Fix the run command on Linux
TEST=cr run chrome
NOTRY=true
Review URL: https://codereview.chromium.org/105313004
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@240638 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module to hold linux specific action implementations."""
import cr
class LinuxRunner(cr.Runner):
"""An implementation of cr.Runner for the linux platform.
This supports directly executing the binaries from the output directory.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Kill(self, context, targets, arguments):
# TODO(iancottrell): Think about how to implement this, or even if we should
print '**WARNING** Kill not yet implemented on linux'
def Run(self, context, target, arguments):
cr.Host.Execute(target, '{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
def Test(self, context, target, arguments):
self.Run(context, target, arguments)
class LinuxInstaller(cr.Installer):
"""An implementation of cr.Installer for the linux platform.
This does nothing, the linux runner works from the output directory, there
is no need to install anywhere.
"""
@property
def enabled(self):
return cr.LinuxPlatform.GetInstance().is_active
def Uninstall(self, context, targets, arguments):
pass
def Install(self, context, targets, arguments):
pass
def Reinstall(self, context, targets, arguments):
pass
|
556054ecbaa265b8e734860f3393acf3bc3e840e | Lib/importlib/test/import_/util.py | Lib/importlib/test/import_/util.py | import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
| import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
| Use the public API, not a private one. | Use the public API, not a private one.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
Use the public API, not a private one. | import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
| <commit_before>import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
<commit_msg>Use the public API, not a private one.<commit_after> | import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
| import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
Use the public API, not a private one.import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
| <commit_before>import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib._bootstrap.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
<commit_msg>Use the public API, not a private one.<commit_after>import functools
import importlib
import importlib._bootstrap
import unittest
using___import__ = False
def import_(*args, **kwargs):
"""Delegate to allow for injecting different implementations of import."""
if using___import__:
return __import__(*args, **kwargs)
else:
return importlib.__import__(*args, **kwargs)
importlib_only = unittest.skipIf(using___import__, "importlib-specific test")
def mock_path_hook(*entries, importer):
"""A mock sys.path_hooks entry."""
def hook(entry):
if entry not in entries:
raise ImportError
return importer
return hook
|
44f56e0c6b53849f7cb97c595e844b706864a780 | ui/tcmui/debug/middleware.py | ui/tcmui/debug/middleware.py | import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc())
| import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc().replace("\n", "<br>\n"))
| Make debug AJAX tracebacks more readable in HTML. | Make debug AJAX tracebacks more readable in HTML.
| Python | bsd-2-clause | bobsilverberg/moztrap,mozilla/moztrap,shinglyu/moztrap,shinglyu/moztrap,shinglyu/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,shinglyu/moztrap,mozilla/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,bobsilverberg/moztrap,bobsilverberg/moztrap,mozilla/moztrap,mozilla/moztrap,mccarrmb/moztrap,mozilla/moztrap,mccarrmb/moztrap | import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc())
Make debug AJAX tracebacks more readable in HTML. | import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc().replace("\n", "<br>\n"))
| <commit_before>import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc())
<commit_msg>Make debug AJAX tracebacks more readable in HTML.<commit_after> | import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc().replace("\n", "<br>\n"))
| import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc())
Make debug AJAX tracebacks more readable in HTML.import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc().replace("\n", "<br>\n"))
| <commit_before>import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc())
<commit_msg>Make debug AJAX tracebacks more readable in HTML.<commit_after>import logging
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponse
log = logging.getLogger("tcmui.core.middleware.RequestLogMiddleware")
class RequestLogMiddleware(object):
def process_request(self, request):
log.debug(
"%(method)s %(url)s",
{
"method": request.method,
"url": request.get_full_path(),
"request": request
}
)
class AjaxTracebackMiddleware(object):
def __init__(self):
if not settings.DEBUG:
raise MiddlewareNotUsed
def process_exception(self, request, *args, **kwargs):
if request.is_ajax():
import traceback
return HttpResponse(traceback.format_exc().replace("\n", "<br>\n"))
|
6dfed291a253174672d7003700ab770aabcacae4 | backend/breach/models/__init__.py | backend/breach/models/__init__.py | from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
| __all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
| Add __all__ to models init file | Add __all__ to models init file
| Python | mit | dimriou/rupture,esarafianou/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture | from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
Add __all__ to models init file | __all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
| <commit_before>from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
<commit_msg>Add __all__ to models init file<commit_after> | __all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
| from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
Add __all__ to models init file__all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
| <commit_before>from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
<commit_msg>Add __all__ to models init file<commit_after>__all__ = ['victim', 'target', 'round', 'sampleset']
from .victim import Victim
from .target import Target
from .round import Round
from .sampleset import SampleSet
|
9ee87588b2d6694cafea6415af50110ba5263d3e | bitbots_body_behaviour/src/bitbots_body_behaviour/body/actions/wait.py | bitbots_body_behaviour/src/bitbots_body_behaviour/body/actions/wait.py | # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time > rospy.get_time():
self.pop()
| # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time < rospy.get_time():
self.pop()
| Fix Bug in Wait logic | Fix Bug in Wait logic
| Python | bsd-3-clause | bit-bots/bitbots_behaviour | # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time > rospy.get_time():
self.pop()
Fix Bug in Wait logic | # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time < rospy.get_time():
self.pop()
| <commit_before># -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time > rospy.get_time():
self.pop()
<commit_msg>Fix Bug in Wait logic<commit_after> | # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time < rospy.get_time():
self.pop()
| # -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time > rospy.get_time():
self.pop()
Fix Bug in Wait logic# -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time < rospy.get_time():
self.pop()
| <commit_before># -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time > rospy.get_time():
self.pop()
<commit_msg>Fix Bug in Wait logic<commit_after># -*- coding:utf-8 -*-
"""
Wait
^^^^
.. moduleauthor:: Martin Poppinga <1popping@informatik.uni-hamburg.de>
Just waits for something (i.e. that preconditions will be fullfilled)
"""
import rospy
from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode
class Wait(AbstractActionModule):
def __init__(self, connector, args=10):
super(Wait, self).__init__(connector)
self.time = rospy.get_time() + args
def perform(self, connector, reevaluate=False):
if connector.world_model.ball_seen():
connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
self.push(Stand)
if self.time < rospy.get_time():
self.pop()
|
df7c783937d90b74c9b477b100709ed04ac0133e | monolithe/vanilla/sphinx/conf.py | monolithe/vanilla/sphinx/conf.py | # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| Use new import for napolean | Use new import for napolean
| Python | bsd-3-clause | nuagenetworks/monolithe,little-dude/monolithe,little-dude/monolithe,nuagenetworks/monolithe,little-dude/monolithe,nuagenetworks/monolithe | # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
Use new import for napolean | # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| <commit_before># -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
<commit_msg>Use new import for napolean<commit_after> | # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| # -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
Use new import for napolean# -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| <commit_before># -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
<commit_msg>Use new import for napolean<commit_after># -*- coding: utf-8 -*-
import sys
import os
import sphinx_rtd_theme
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.napoleon']
add_module_names = False
source_suffix = '.rst'
master_doc = 'index'
project = u'vspk'
copyright = u'2015, Nuage Networks'
version = ''
release = ''
exclude_patterns = ['_build', '../vsdk/autogenerates']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# html_theme = "pyramid"
# html_static_path = ['_static']
htmlhelp_basename = '32doc'
html_logo = 'nuage-logo.png'
autodoc_member_order = "groupwise"
autodoc_default_flags = []
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
|
c301e99bbf5b32e3c66d68f422fdfc271390adf4 | txircd/modules/cmode_s.py | txircd/modules/cmode_s.py | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | Make +s actually definitely clear the cdata dictionary | Make +s actually definitely clear the cdata dictionary
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)Make +s actually definitely clear the cdata dictionary | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | <commit_before>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)<commit_msg>Make +s actually definitely clear the cdata dictionary<commit_after> | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)Make +s actually definitely clear the cdata dictionaryfrom txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | <commit_before>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)<commit_msg>Make +s actually definitely clear the cdata dictionary<commit_after>from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) |
dc884cfd49133a9a25cc5ba6276b94dd44d18729 | test/test_general.py | test/test_general.py | import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
| import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
| Add jobs to second test queen, add assertions | Add jobs to second test queen, add assertions
| Python | bsd-3-clause | iansmcf/busybees | import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
Add jobs to second test queen, add assertions | import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
| <commit_before>import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
<commit_msg>Add jobs to second test queen, add assertions<commit_after> | import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
| import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
Add jobs to second test queen, add assertionsimport threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
| <commit_before>import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
jobs = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', jobs, ErrWorker)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i == "Exit code: 6"
print i
sys.exit(0)
<commit_msg>Add jobs to second test queen, add assertions<commit_after>import threading
import time
import sys
from busybees import worker
from busybees import hive
import pash
class ErrWorker(worker.Worker):
def work(self, command):
proc = pash.ShellProc()
proc.run(command)
return "Exit code: %s" % proc.get_val('exit_code')
def test_hive():
apiary = hive.Hive()
apiary.create_queen('A1')
apiary.create_queen('A2')
apiary.start_queen('A1')
apiary.start_queen('A2')
job1 = ["iscsiadm -m discovery -t st -p 192.168.88.110",
"iscsiadm -m discovery -t st -p 192.168.90.110",
"iscsiadm -m discovery -t st -p 192.168.88.110"]
apiary.instruct_queen('A1', job1, ErrWorker)
job2 = ["ls -l ~", "date", "cal"]
apiary.instruct_queen('A2', job2)
apiary.kill_queen('A1')
time.sleep(3)
this = apiary.die()
for key in this.keys():
for i in this[key]:
assert i != '' and i != None
|
6d118fed4df334e093840d0bcaad98a06214793b | week1/the_real_deal/sum_matrix.py | week1/the_real_deal/sum_matrix.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
| Make it look more pythonic | Make it look more pythonic
| Python | bsd-3-clause | sevgo/Programming101 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
Make it look more pythonic | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
<commit_msg>Make it look more pythonic<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
Make it look more pythonic#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
p = [sum(x) for x in n]
print (len(p))
return sum(p)
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
<commit_msg>Make it look more pythonic<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def sum_matrix(n):
""" Returns a sum of all elements in a
given matrix """
return sum([sum(x) for x in n])
if __name__ == '__main__':
print (sum_matrix([[0, 3, 0], [0, 4, 0], [0, 13, 0]]))
|
b66d8c2d43a28ce6e0824543bd879dc3528e3509 | rest/available-phone-numbers/local-basic-example-1/local-get-basic-example-1.6.x.py | rest/available-phone-numbers/local-basic-example-1/local-get-basic-example-1.6.x.py | # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
# Purchase the phone number
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| Add a comment about purchasing the phone number | Add a comment about purchasing the phone number | Python | mit | TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets | # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
Add a comment about purchasing the phone number | # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
# Purchase the phone number
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| <commit_before># Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
<commit_msg>Add a comment about purchasing the phone number<commit_after> | # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
# Purchase the phone number
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
Add a comment about purchasing the phone number# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
# Purchase the phone number
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
| <commit_before># Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
<commit_msg>Add a comment about purchasing the phone number<commit_after># Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = Client(account_sid, auth_token)
numbers = client.available_phone_numbers("US") \
.local \
.list(area_code="510")
# Purchase the phone number
number = client.incoming_phone_numbers \
.create(phone_number=numbers[0].phone_number)
print(number.sid)
|
3b7dcc4d2a19b5ac03eebae35600c25dd038fe33 | tests/test_server.py | tests/test_server.py | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.reads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| Test against dictionary, not a string | Test against dictionary, not a string
| Python | bsd-2-clause | algorithmic-music-exploration/amen-server,algorithmic-music-exploration/amen-server | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.reads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
Test against dictionary, not a string | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| <commit_before>import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.reads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
<commit_msg>Test against dictionary, not a string<commit_after> | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.reads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
Test against dictionary, not a stringimport hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
| <commit_before>import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.reads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
<commit_msg>Test against dictionary, not a string<commit_after>import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
q = Mock()
filename = 'afakefilename'
files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
hash_object = hashlib.md5(filename.encode())
audio_filename = hash_object.hexdigest() + "-" + filename
analysis_filename = audio_filename + '.analysis.json'
expected = {'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)}
actual = json.loads(handle_post(q, files, get_url, upload))
q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
assert expected == actual
|
ca8600faac6b10f5e1bda42d74208f3189efe529 | bin/debug/load_timeline_for_day_and_user.py | bin/debug/load_timeline_for_day_and_user.py | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
tsdb.save(entry)
| import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| Add option to print debug statements at regular intervals | Add option to print debug statements at regular intervals
Useful to track the progress of the load. This was a change copied from the
production server.
| Python | bsd-3-clause | shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
tsdb.save(entry)
Add option to print debug statements at regular intervals
Useful to track the progress of the load. This was a change copied from the
production server. | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| <commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
tsdb.save(entry)
<commit_msg>Add option to print debug statements at regular intervals
Useful to track the progress of the load. This was a change copied from the
production server.<commit_after> | import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
tsdb.save(entry)
Add option to print debug statements at regular intervals
Useful to track the progress of the load. This was a change copied from the
production server.import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
| <commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
tsdb.save(entry)
<commit_msg>Add option to print debug statements at regular intervals
Useful to track the progress of the load. This was a change copied from the
production server.<commit_after>import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-n", "--make_new", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose",
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if args.make_new:
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print "About to save %s" % entry
tsdb.save(entry)
|
249a49d2f174571db22860ebfffc37637cacd9be | xmantissa/plugins/hyperbolaoff.py | xmantissa/plugins/hyperbolaoff.py | from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
loginInterfaces = (),
themes = (HyperbolaTheme('base', 0),)
)
| from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
themes = (HyperbolaTheme('base', 0),)
)
| Revert 5505 - introduced numerous regressions into the test suite | Revert 5505 - introduced numerous regressions into the test suite | Python | mit | twisted/hyperbola,twisted/hyperbola | from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
loginInterfaces = (),
themes = (HyperbolaTheme('base', 0),)
)
Revert 5505 - introduced numerous regressions into the test suite | from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
themes = (HyperbolaTheme('base', 0),)
)
| <commit_before>from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
loginInterfaces = (),
themes = (HyperbolaTheme('base', 0),)
)
<commit_msg>Revert 5505 - introduced numerous regressions into the test suite<commit_after> | from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
themes = (HyperbolaTheme('base', 0),)
)
| from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
loginInterfaces = (),
themes = (HyperbolaTheme('base', 0),)
)
Revert 5505 - introduced numerous regressions into the test suitefrom axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
themes = (HyperbolaTheme('base', 0),)
)
| <commit_before>from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
loginInterfaces = (),
themes = (HyperbolaTheme('base', 0),)
)
<commit_msg>Revert 5505 - introduced numerous regressions into the test suite<commit_after>from axiom import iaxiom, userbase
from xmantissa import website, offering, provisioning
import hyperbola
from hyperbola import hyperbola_model
from hyperbola.hyperbola_theme import HyperbolaTheme
hyperbolaer = provisioning.BenefactorFactory(
name = u'hyperbolaer',
description = u'A wonderful ready to use application named Hyperbola',
benefactorClass = hyperbola_model.HyperbolaBenefactor)
plugin = offering.Offering(
name = u"Hyperbola",
description = u"""
This is the wonderful Hyperbola application. Click me to install.
""",
siteRequirements = (
(userbase.IRealm, userbase.LoginSystem),
(None, website.WebSite)),
appPowerups = (
),
benefactorFactories = (hyperbolaer,),
themes = (HyperbolaTheme('base', 0),)
)
|
389d7e5d131188d5b8a3f9111d9a6a7a96ce8af8 | dmoj/executors/ICK.py | dmoj/executors/ICK.py | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
| from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| Make Intercal executor not fail to start at times. | Make Intercal executor not fail to start at times.
| Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
Make Intercal executor not fail to start at times. | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| <commit_before>from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
<commit_msg>Make Intercal executor not fail to start at times.<commit_after> | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
Make Intercal executor not fail to start at times.from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| <commit_before>from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
<commit_msg>Make Intercal executor not fail to start at times.<commit_after>from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
|
34e78e686b967bbc6d3cc64786b5d12757210e87 | Function_blocks_Advanced/EPC_Email_Notification/email_notification.py | Function_blocks_Advanced/EPC_Email_Notification/email_notification.py | #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + MAIL_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
| #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + SMTP_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
| Fix in EPC example - incorrect variable name. | Fix in EPC example - incorrect variable name.
| Python | mit | rexcontrols/REXexamples,rexcontrols/REXexamples,rexcontrols/REXexamples,rexcontrols/REXexamples,rexcontrols/REXexamples,rexcontrols/REXexamples,rexcontrols/REXexamples | #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + MAIL_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
Fix in EPC example - incorrect variable name. | #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + SMTP_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
| <commit_before>#!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + MAIL_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
<commit_msg>Fix in EPC example - incorrect variable name.<commit_after> | #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + SMTP_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
| #!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + MAIL_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
Fix in EPC example - incorrect variable name.#!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + SMTP_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
| <commit_before>#!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + MAIL_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
<commit_msg>Fix in EPC example - incorrect variable name.<commit_after>#!/usr/bin/python
import smtplib
#SMTP server settings
SMTP_SERVER = 'smtp.server.com' #e.g. smtp.gmail.com
SMTP_PORT = 587
SMTP_USERNAME = 'yourname@server.com' #your login name, e.g. yourname@gmail.com
SMTP_PASSWORD = 'yourpassword' #CAUTION: This is stored in plain text!
#notification recipient and content
recipient = 'notification@recipient.com'
subject = 'Event notification [REX Control System]'
emailText = 'This is to inform you that an event ocurred.'
emailText = "" + emailText + ""
headers = ["From: " + SMTP_USERNAME,
"Subject: " + subject,
"To: " + recipient,
"MIME-Version: 1.0",
"Content-Type: text/html"]
headers = "\r\n".join(headers)
session = smtplib.SMTP(SMTP_SERVER, SMTP_PORT)
session.ehlo()
session.starttls()
session.ehlo
session.login(SMTP_USERNAME, SMTP_PASSWORD)
session.sendmail(SMTP_USERNAME, recipient, headers + "\r\n\r\n" + emailText)
session.quit()
|
435cdbda7d93287db6dcd652a79324a86becd9b8 | bytecode.py | bytecode.py | class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a/b)
| class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a/b)
| Edit arithmetic operators to use the underlying vm directly | Edit arithmetic operators to use the underlying vm directly
| Python | bsd-3-clause | darbaga/simple_compiler | class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a/b)
Edit arithmetic operators to use the underlying vm directly | class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a/b)
| <commit_before>class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a/b)
<commit_msg>Edit arithmetic operators to use the underlying vm directly<commit_after> | class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a/b)
| class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a/b)
Edit arithmetic operators to use the underlying vm directlyclass BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a/b)
| <commit_before>class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = Pop().execute(machine)
b = Pop().execute(machine)
machine.push(a/b)
<commit_msg>Edit arithmetic operators to use the underlying vm directly<commit_after>class BytecodeBase:
def __init__(self):
# Eventually might want to add subclassed bytecodes here
# Though __subclasses__ works quite well
pass
def execute(self, machine):
pass
class Push(BytecodeBase):
def __init__(self, data):
self.data = data
def execute(self, machine):
machine.push(self.data)
class Pop(BytecodeBase):
def execute(self, machine):
return machine.pop()
class Add(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a+b)
class Sub(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a-b)
class Mul(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a*b)
class Div(BytecodeBase):
def execute(self, machine):
a = machine.pop()
b = machine.pop()
machine.push(a/b)
|
3b14ed7d9ec092baaf10c9f81955dda28508db35 | tests/test_basics.py | tests/test_basics.py | import unittest
from phaseplot import phase_portrait
import matplotlib
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
| import unittest
from phaseplot import phase_portrait
import matplotlib
from matplotlib import pyplot as plt
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
def test_extent(self):
"""Test that the 'box' argument matches extent"""
# See also: issue #1
ai = phase_portrait(lambda(z) : z, box = [-1,1,-1,1])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 1, -1, 1) )
ai = phase_portrait(lambda(z) : z, box = [-1,2,-3,4])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 2, -3, 4) )
| Add test for correct image extent | Add test for correct image extent
| Python | mit | rluce/python-phaseplot | import unittest
from phaseplot import phase_portrait
import matplotlib
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
Add test for correct image extent | import unittest
from phaseplot import phase_portrait
import matplotlib
from matplotlib import pyplot as plt
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
def test_extent(self):
"""Test that the 'box' argument matches extent"""
# See also: issue #1
ai = phase_portrait(lambda(z) : z, box = [-1,1,-1,1])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 1, -1, 1) )
ai = phase_portrait(lambda(z) : z, box = [-1,2,-3,4])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 2, -3, 4) )
| <commit_before>import unittest
from phaseplot import phase_portrait
import matplotlib
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
<commit_msg>Add test for correct image extent<commit_after> | import unittest
from phaseplot import phase_portrait
import matplotlib
from matplotlib import pyplot as plt
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
def test_extent(self):
"""Test that the 'box' argument matches extent"""
# See also: issue #1
ai = phase_portrait(lambda(z) : z, box = [-1,1,-1,1])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 1, -1, 1) )
ai = phase_portrait(lambda(z) : z, box = [-1,2,-3,4])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 2, -3, 4) )
| import unittest
from phaseplot import phase_portrait
import matplotlib
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
Add test for correct image extentimport unittest
from phaseplot import phase_portrait
import matplotlib
from matplotlib import pyplot as plt
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
def test_extent(self):
"""Test that the 'box' argument matches extent"""
# See also: issue #1
ai = phase_portrait(lambda(z) : z, box = [-1,1,-1,1])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 1, -1, 1) )
ai = phase_portrait(lambda(z) : z, box = [-1,2,-3,4])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 2, -3, 4) )
| <commit_before>import unittest
from phaseplot import phase_portrait
import matplotlib
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
<commit_msg>Add test for correct image extent<commit_after>import unittest
from phaseplot import phase_portrait
import matplotlib
from matplotlib import pyplot as plt
class TestBasics(unittest.TestCase):
"""A collection of basic tests with no particular theme"""
def test_retval(self):
"""phase_portrait returns an AxesImage instance"""
def somefun(z): return z*z + 1
retval = phase_portrait(somefun)
self.assertIsInstance(retval, matplotlib.image.AxesImage)
def test_extent(self):
"""Test that the 'box' argument matches extent"""
# See also: issue #1
ai = phase_portrait(lambda(z) : z, box = [-1,1,-1,1])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 1, -1, 1) )
ai = phase_portrait(lambda(z) : z, box = [-1,2,-3,4])
# extent = (left, right, bottom, top)
extent = ai.get_extent()
self.assertEqual( extent, (-1, 2, -3, 4) )
|
233e5b2f48ae567f50843dc3b8b4301a21c12b71 | cloud_notes/templatetags/markdown_filters.py | cloud_notes/templatetags/markdown_filters.py | from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown) | from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['blockquote', 'pre', 'p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
| Add pre tag to cloud notes | Add pre tag to cloud notes
| Python | apache-2.0 | kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2 | from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)Add pre tag to cloud notes | from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['blockquote', 'pre', 'p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
| <commit_before>from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)<commit_msg>Add pre tag to cloud notes<commit_after> | from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['blockquote', 'pre', 'p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
| from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)Add pre tag to cloud notesfrom django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['blockquote', 'pre', 'p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
| <commit_before>from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)<commit_msg>Add pre tag to cloud notes<commit_after>from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['blockquote', 'pre', 'p', 'br', 'hr', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
c82a6a9dce1036c94a6e4ac9d09196822935116f | doc/conf.py | doc/conf.py | # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
| # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
html_domain_indices = ['py-modindex']
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
| Use only the python module index, but not the one from the (broken) pyqt4 extension | Use only the python module index, but not the one from the (broken) pyqt4 extension
| Python | lgpl-2.1 | mulkieran/pyudev,mulkieran/pyudev,deepakkapoor624/pyudev,deepakkapoor624/pyudev,pyudev/pyudev,mulkieran/pyudev | # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
Use only the python module index, but not the one from the (broken) pyqt4 extension | # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
html_domain_indices = ['py-modindex']
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
| <commit_before># -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
<commit_msg>Use only the python module index, but not the one from the (broken) pyqt4 extension<commit_after> | # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
html_domain_indices = ['py-modindex']
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
| # -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
Use only the python module index, but not the one from the (broken) pyqt4 extension# -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
html_domain_indices = ['py-modindex']
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
| <commit_before># -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
<commit_msg>Use only the python module index, but not the one from the (broken) pyqt4 extension<commit_after># -*- coding: utf-8 -*-
import sys, os
import pyudev
needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx',
'sphinxcontrib.pyqt4', 'sphinxcontrib.issuetracker']
master_doc = 'index'
exclude_patterns = ['_build/*']
source_suffix = '.rst'
project = u'pyudev'
copyright = u'2010, Sebastian Wiesner'
version = '.'.join(pyudev.__version__.split('.')[:2])
release = pyudev.__version__
html_theme = 'default'
html_static_path = []
html_domain_indices = ['py-modindex']
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
issuetracker = 'github'
issuetracker_user = u'lunaryorn'
def setup(app):
from sphinx.ext.autodoc import cut_lines
app.connect('autodoc-process-docstring', cut_lines(2, what=['module']))
|
7dacd28007097f83713b08d8b768d8ba8f6629d2 | src/unittest/python/stack_configuration/stack_configuration_tests.py | src/unittest/python/stack_configuration/stack_configuration_tests.py | import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'foo': {'template-url': 'foo.json'}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['foo'], StackConfig))
self.assertEqual('foo.json', config.stacks['foo'].template_url)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'foo': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
| import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'any-stack': {'template-url': 'foo.json', 'tags': {'any-tag': 'any-tag-value'}, 'parameters': {'any-parameter': 'any-value'}}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['any-stack'], StackConfig))
self.assertEqual('foo.json', config.stacks['any-stack'].template_url)
self.assertEqual({'any-tag': 'any-tag-value'}, config.stacks['any-stack'].tags)
self.assertEqual({'any-parameter': 'any-value'}, config.stacks['any-stack'].parameters)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'any-stack': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
| Make test variables more descriptive | refactor: Make test variables more descriptive
| Python | apache-2.0 | ImmobilienScout24/cfn-sphere,cfn-sphere/cfn-sphere,marco-hoyer/cfn-sphere,cfn-sphere/cfn-sphere,cfn-sphere/cfn-sphere | import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'foo': {'template-url': 'foo.json'}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['foo'], StackConfig))
self.assertEqual('foo.json', config.stacks['foo'].template_url)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'foo': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
refactor: Make test variables more descriptive | import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'any-stack': {'template-url': 'foo.json', 'tags': {'any-tag': 'any-tag-value'}, 'parameters': {'any-parameter': 'any-value'}}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['any-stack'], StackConfig))
self.assertEqual('foo.json', config.stacks['any-stack'].template_url)
self.assertEqual({'any-tag': 'any-tag-value'}, config.stacks['any-stack'].tags)
self.assertEqual({'any-parameter': 'any-value'}, config.stacks['any-stack'].parameters)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'any-stack': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
| <commit_before>import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'foo': {'template-url': 'foo.json'}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['foo'], StackConfig))
self.assertEqual('foo.json', config.stacks['foo'].template_url)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'foo': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
<commit_msg>refactor: Make test variables more descriptive<commit_after> | import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'any-stack': {'template-url': 'foo.json', 'tags': {'any-tag': 'any-tag-value'}, 'parameters': {'any-parameter': 'any-value'}}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['any-stack'], StackConfig))
self.assertEqual('foo.json', config.stacks['any-stack'].template_url)
self.assertEqual({'any-tag': 'any-tag-value'}, config.stacks['any-stack'].tags)
self.assertEqual({'any-parameter': 'any-value'}, config.stacks['any-stack'].parameters)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'any-stack': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
| import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'foo': {'template-url': 'foo.json'}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['foo'], StackConfig))
self.assertEqual('foo.json', config.stacks['foo'].template_url)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'foo': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
refactor: Make test variables more descriptiveimport unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'any-stack': {'template-url': 'foo.json', 'tags': {'any-tag': 'any-tag-value'}, 'parameters': {'any-parameter': 'any-value'}}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['any-stack'], StackConfig))
self.assertEqual('foo.json', config.stacks['any-stack'].template_url)
self.assertEqual({'any-tag': 'any-tag-value'}, config.stacks['any-stack'].tags)
self.assertEqual({'any-parameter': 'any-value'}, config.stacks['any-stack'].parameters)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'any-stack': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
| <commit_before>import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'foo': {'template-url': 'foo.json'}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['foo'], StackConfig))
self.assertEqual('foo.json', config.stacks['foo'].template_url)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'foo': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
<commit_msg>refactor: Make test variables more descriptive<commit_after>import unittest2
from cfn_sphere.stack_configuration import Config, StackConfig, NoConfigException
class ConfigTests(unittest2.TestCase):
def test_properties_parsing(self):
config = Config(config_dict={'region': 'eu-west-1', 'stacks': {'any-stack': {'template-url': 'foo.json', 'tags': {'any-tag': 'any-tag-value'}, 'parameters': {'any-parameter': 'any-value'}}}})
self.assertEqual('eu-west-1', config.region)
self.assertEqual(1, len(config.stacks.keys()))
self.assertTrue(isinstance(config.stacks['any-stack'], StackConfig))
self.assertEqual('foo.json', config.stacks['any-stack'].template_url)
self.assertEqual({'any-tag': 'any-tag-value'}, config.stacks['any-stack'].tags)
self.assertEqual({'any-parameter': 'any-value'}, config.stacks['any-stack'].parameters)
def test_raises_exception_if_no_region_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'foo': '', 'stacks': {'any-stack': {'template': 'foo.json'}}})
def test_raises_exception_if_no_stacks_key(self):
with self.assertRaises(NoConfigException):
Config(config_dict={'region': 'eu-west-1'})
|
5c60aad725b0b98008ee467c5130931339c12d48 | os_client_config/cloud_config.py | os_client_config/cloud_config.py | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
| # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
def __eq__(self, other):
return (self.name == other.name and self.region == other.region
and self.config == other.config)
| Add an equality method for CloudConfig | Add an equality method for CloudConfig
In order to track if a config has changed, we need to be able to compare
the CloudConfig objects for equality.
Change-Id: Icdd9acede81bc5fba60d877194048e24a62c9e5d
| Python | apache-2.0 | stackforge/python-openstacksdk,redhat-openstack/os-client-config,openstack/python-openstacksdk,dtroyer/python-openstacksdk,openstack/os-client-config,dtroyer/os-client-config,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,openstack/python-openstacksdk,switch-ch/os-client-config | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
Add an equality method for CloudConfig
In order to track if a config has changed, we need to be able to compare
the CloudConfig objects for equality.
Change-Id: Icdd9acede81bc5fba60d877194048e24a62c9e5d | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
def __eq__(self, other):
return (self.name == other.name and self.region == other.region
and self.config == other.config)
| <commit_before># Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
<commit_msg>Add an equality method for CloudConfig
In order to track if a config has changed, we need to be able to compare
the CloudConfig objects for equality.
Change-Id: Icdd9acede81bc5fba60d877194048e24a62c9e5d<commit_after> | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
def __eq__(self, other):
return (self.name == other.name and self.region == other.region
and self.config == other.config)
| # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
Add an equality method for CloudConfig
In order to track if a config has changed, we need to be able to compare
the CloudConfig objects for equality.
Change-Id: Icdd9acede81bc5fba60d877194048e24a62c9e5d# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
def __eq__(self, other):
return (self.name == other.name and self.region == other.region
and self.config == other.config)
| <commit_before># Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
<commit_msg>Add an equality method for CloudConfig
In order to track if a config has changed, we need to be able to compare
the CloudConfig objects for equality.
Change-Id: Icdd9acede81bc5fba60d877194048e24a62c9e5d<commit_after># Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class CloudConfig(object):
def __init__(self, name, region, config):
self.name = name
self.region = region
self.config = config
def __getattr__(self, key):
"""Return arbitrary attributes."""
if key.startswith('os_'):
key = key[3:]
if key in [attr.replace('-', '_') for attr in self.config]:
return self.config[key]
else:
return None
def __iter__(self):
return self.config.__iter__()
def __eq__(self, other):
return (self.name == other.name and self.region == other.region
and self.config == other.config)
|
f879bf6304fcd31e32b55c40462dce06ff859410 | turbasen/settings.py | turbasen/settings.py | import os
from .cache import DummyCache
class Settings:
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
| import os
from .cache import DummyCache
class MetaSettings(type):
"""Implements reprentation for the Settings singleton, displaying all settings and values"""
def __repr__(cls):
settings = [
'%s=%s' % (name, getattr(cls, name))
for name in dir(cls)
if not name.startswith('_')
]
return '<%s: %s>' % (cls.__name__, ', '.join(settings))
class Settings(metaclass=MetaSettings):
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
| Implement repr for Settings class | Implement repr for Settings class
| Python | mit | Turbasen/turbasen.py | import os
from .cache import DummyCache
class Settings:
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
Implement repr for Settings class | import os
from .cache import DummyCache
class MetaSettings(type):
"""Implements reprentation for the Settings singleton, displaying all settings and values"""
def __repr__(cls):
settings = [
'%s=%s' % (name, getattr(cls, name))
for name in dir(cls)
if not name.startswith('_')
]
return '<%s: %s>' % (cls.__name__, ', '.join(settings))
class Settings(metaclass=MetaSettings):
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
| <commit_before>import os
from .cache import DummyCache
class Settings:
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
<commit_msg>Implement repr for Settings class<commit_after> | import os
from .cache import DummyCache
class MetaSettings(type):
"""Implements reprentation for the Settings singleton, displaying all settings and values"""
def __repr__(cls):
settings = [
'%s=%s' % (name, getattr(cls, name))
for name in dir(cls)
if not name.startswith('_')
]
return '<%s: %s>' % (cls.__name__, ', '.join(settings))
class Settings(metaclass=MetaSettings):
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
| import os
from .cache import DummyCache
class Settings:
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
Implement repr for Settings classimport os
from .cache import DummyCache
class MetaSettings(type):
"""Implements reprentation for the Settings singleton, displaying all settings and values"""
def __repr__(cls):
settings = [
'%s=%s' % (name, getattr(cls, name))
for name in dir(cls)
if not name.startswith('_')
]
return '<%s: %s>' % (cls.__name__, ', '.join(settings))
class Settings(metaclass=MetaSettings):
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
| <commit_before>import os
from .cache import DummyCache
class Settings:
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
<commit_msg>Implement repr for Settings class<commit_after>import os
from .cache import DummyCache
class MetaSettings(type):
"""Implements reprentation for the Settings singleton, displaying all settings and values"""
def __repr__(cls):
settings = [
'%s=%s' % (name, getattr(cls, name))
for name in dir(cls)
if not name.startswith('_')
]
return '<%s: %s>' % (cls.__name__, ', '.join(settings))
class Settings(metaclass=MetaSettings):
ENDPOINT_URL = os.environ.get('ENDPOINT_URL', 'https://api.nasjonalturbase.no')
LIMIT = 20
CACHE = DummyCache()
CACHE_LOOKUP_PERIOD = 60 * 60 * 24
CACHE_GET_PERIOD = 60 * 60 * 24 * 30
ETAG_CACHE_PERIOD = 60 * 60
API_KEY = os.environ.get('API_KEY', '')
def configure(**settings):
for key, value in settings.items():
# Strip any trailing slash in ENDPOINT_URL
if key == 'ENDPOINT_URL':
value = value.rstrip('/')
setattr(Settings, key, value)
|
02d971ae2533336ba0625561a70c968b9d71b936 | PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py | PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard input
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 | Fix incorrect wording in addInfoToCSVReport.py | Update: Fix incorrect wording in addInfoToCSVReport.py
| Python | mit | PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1Update: Fix incorrect wording in addInfoToCSVReport.py | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard input
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 | <commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1<commit_msg>Update: Fix incorrect wording in addInfoToCSVReport.py<commit_after> | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard input
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 | #!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1Update: Fix incorrect wording in addInfoToCSVReport.py#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard input
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 | <commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1<commit_msg>Update: Fix incorrect wording in addInfoToCSVReport.py<commit_after>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard input
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1 |
1272a93f4ea5b35b9b4030d984264b7e7fb7969e | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5'
| Update dsub version to 0.2.5. | Update dsub version to 0.2.5.
PiperOrigin-RevId: 232755945
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5.dev0'
Update dsub version to 0.2.5.
PiperOrigin-RevId: 232755945 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5.dev0'
<commit_msg>Update dsub version to 0.2.5.
PiperOrigin-RevId: 232755945<commit_after> | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5.dev0'
Update dsub version to 0.2.5.
PiperOrigin-RevId: 232755945# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5.dev0'
<commit_msg>Update dsub version to 0.2.5.
PiperOrigin-RevId: 232755945<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.2.5'
|
b7bdd73fdfe0036ceb0a423e3d2619a8a4a35a1f | dsub/_dsub_version.py | dsub/_dsub_version.py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7.dev0'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7'
| Update dsub version to 0.4.7 | Update dsub version to 0.4.7
PiperOrigin-RevId: 449501976
| Python | apache-2.0 | DataBiosphere/dsub,DataBiosphere/dsub | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7.dev0'
Update dsub version to 0.4.7
PiperOrigin-RevId: 449501976 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7.dev0'
<commit_msg>Update dsub version to 0.4.7
PiperOrigin-RevId: 449501976<commit_after> | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7'
| # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7.dev0'
Update dsub version to 0.4.7
PiperOrigin-RevId: 449501976# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7'
| <commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7.dev0'
<commit_msg>Update dsub version to 0.4.7
PiperOrigin-RevId: 449501976<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.7'
|
fbbe736b649a85cddf773548b895ccaa9ead8c67 | docker/nvidia/setup_nvidia_docker_compose.py | docker/nvidia/setup_nvidia_docker_compose.py | #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
response = requests.get('http://localhost:3476/docker/cli/json')
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
| #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
try:
response = requests.get('http://localhost:3476/docker/cli/json')
except requests.exceptions.ConnectionError, e:
print('Cannot connect to the nvidia docker plugin. Did you install it? Is the plugin daemon running on this host?')
raise e
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
| Add error handling in case nvidia plugin daemon is not running | Add error handling in case nvidia plugin daemon is not running
| Python | bsd-3-clause | ORNL-CEES/DataTransferKit,dalg24/DataTransferKit,amccaskey/DataTransferKit,dalg24/DataTransferKit,ORNL-CEES/DataTransferKit,Rombur/DataTransferKit,dalg24/DataTransferKit,Rombur/DataTransferKit,dalg24/DataTransferKit,amccaskey/DataTransferKit,ORNL-CEES/DataTransferKit,ORNL-CEES/DataTransferKit,Rombur/DataTransferKit,amccaskey/DataTransferKit,Rombur/DataTransferKit,amccaskey/DataTransferKit | #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
response = requests.get('http://localhost:3476/docker/cli/json')
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
Add error handling in case nvidia plugin daemon is not running | #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
try:
response = requests.get('http://localhost:3476/docker/cli/json')
except requests.exceptions.ConnectionError, e:
print('Cannot connect to the nvidia docker plugin. Did you install it? Is the plugin daemon running on this host?')
raise e
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
| <commit_before>#!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
response = requests.get('http://localhost:3476/docker/cli/json')
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
<commit_msg>Add error handling in case nvidia plugin daemon is not running<commit_after> | #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
try:
response = requests.get('http://localhost:3476/docker/cli/json')
except requests.exceptions.ConnectionError, e:
print('Cannot connect to the nvidia docker plugin. Did you install it? Is the plugin daemon running on this host?')
raise e
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
| #!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
response = requests.get('http://localhost:3476/docker/cli/json')
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
Add error handling in case nvidia plugin daemon is not running#!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
try:
response = requests.get('http://localhost:3476/docker/cli/json')
except requests.exceptions.ConnectionError, e:
print('Cannot connect to the nvidia docker plugin. Did you install it? Is the plugin daemon running on this host?')
raise e
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
| <commit_before>#!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
response = requests.get('http://localhost:3476/docker/cli/json')
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
<commit_msg>Add error handling in case nvidia plugin daemon is not running<commit_after>#!/usr/bin/env python
import requests
import yaml
# query nvidia docker plugin for the command-line parameters to use with the
# `docker run` command
try:
response = requests.get('http://localhost:3476/docker/cli/json')
except requests.exceptions.ConnectionError, e:
print('Cannot connect to the nvidia docker plugin. Did you install it? Is the plugin daemon running on this host?')
raise e
docker_cli_params = response.json()
devices = docker_cli_params['Devices']
volumes = docker_cli_params['Volumes']
# load the template docker compose file to extend the configuration of our
# DTK development container and make it GPU-aware
with open('docker-compose.template.yml', 'r') as fin:
config = yaml.load(fin)
# add devices and volumes configuration options to the template
config['services']['dtk_dev']['devices'] = devices
config['services']['dtk_dev']['volumes'] = volumes
config['volumes'] = {}
config['volumes'][volumes[0].split(':')[0]] = {'external': True}
# write out the extension of the basic DTK docker compose file
with open('docker-compose.yml', 'w') as fout:
fout.write(yaml.safe_dump(config, default_flow_style=False))
|
4503294985c45e02e284dc3ab7dac4631856c126 | rainforest_makers/urls.py | rainforest_makers/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Add Media url/root to settings | Add Media url/root to settings
| Python | mit | bjorncooley/rainforest_makers,bjorncooley/rainforest_makers | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)
Add Media url/root to settings | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| <commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)
<commit_msg>Add Media url/root to settings<commit_after> | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)
Add Media url/root to settingsfrom django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| <commit_before>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)
<commit_msg>Add Media url/root to settings<commit_after>from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'rainforest_makers.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('spirit.urls', namespace="spirit", app_name="spirit")),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
7092293a569c382dac4f2f9ac69b879ea4b500d1 | django_prometheus/db/backends/mysql/base.py | django_prometheus/db/backends/mysql/base.py | from django_prometheus.db.common import DatabaseWrapperMixin
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
| from django_prometheus.db.common import (
DatabaseWrapperMixin, ExportingCursorWrapper)
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
def create_cursor(self):
cursor = self.connection.cursor()
CursorWrapper = ExportingCursorWrapper(
self.CURSOR_CLASS, self.alias, self.vendor)
return CursorWrapper(cursor)
| Use the proper API to Python-MySQL. | Use the proper API to Python-MySQL.
The common mixin used for other databases uses an API established
across databases, but Python-MySQL differs. This was broken during the
refactoring in 432f1874ffde0ad120aa79e568086a1731d22aeb.
Fixes #24
| Python | apache-2.0 | korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus | from django_prometheus.db.common import DatabaseWrapperMixin
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
Use the proper API to Python-MySQL.
The common mixin used for other databases uses an API established
across databases, but Python-MySQL differs. This was broken during the
refactoring in 432f1874ffde0ad120aa79e568086a1731d22aeb.
Fixes #24 | from django_prometheus.db.common import (
DatabaseWrapperMixin, ExportingCursorWrapper)
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
def create_cursor(self):
cursor = self.connection.cursor()
CursorWrapper = ExportingCursorWrapper(
self.CURSOR_CLASS, self.alias, self.vendor)
return CursorWrapper(cursor)
| <commit_before>from django_prometheus.db.common import DatabaseWrapperMixin
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
<commit_msg>Use the proper API to Python-MySQL.
The common mixin used for other databases uses an API established
across databases, but Python-MySQL differs. This was broken during the
refactoring in 432f1874ffde0ad120aa79e568086a1731d22aeb.
Fixes #24<commit_after> | from django_prometheus.db.common import (
DatabaseWrapperMixin, ExportingCursorWrapper)
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
def create_cursor(self):
cursor = self.connection.cursor()
CursorWrapper = ExportingCursorWrapper(
self.CURSOR_CLASS, self.alias, self.vendor)
return CursorWrapper(cursor)
| from django_prometheus.db.common import DatabaseWrapperMixin
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
Use the proper API to Python-MySQL.
The common mixin used for other databases uses an API established
across databases, but Python-MySQL differs. This was broken during the
refactoring in 432f1874ffde0ad120aa79e568086a1731d22aeb.
Fixes #24from django_prometheus.db.common import (
DatabaseWrapperMixin, ExportingCursorWrapper)
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
def create_cursor(self):
cursor = self.connection.cursor()
CursorWrapper = ExportingCursorWrapper(
self.CURSOR_CLASS, self.alias, self.vendor)
return CursorWrapper(cursor)
| <commit_before>from django_prometheus.db.common import DatabaseWrapperMixin
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
<commit_msg>Use the proper API to Python-MySQL.
The common mixin used for other databases uses an API established
across databases, but Python-MySQL differs. This was broken during the
refactoring in 432f1874ffde0ad120aa79e568086a1731d22aeb.
Fixes #24<commit_after>from django_prometheus.db.common import (
DatabaseWrapperMixin, ExportingCursorWrapper)
from django.db.backends.mysql import base
class DatabaseFeatures(base.DatabaseFeatures):
"""Our database has the exact same features as the base one."""
pass
class DatabaseWrapper(DatabaseWrapperMixin, base.DatabaseWrapper):
CURSOR_CLASS = base.CursorWrapper
def create_cursor(self):
cursor = self.connection.cursor()
CursorWrapper = ExportingCursorWrapper(
self.CURSOR_CLASS, self.alias, self.vendor)
return CursorWrapper(cursor)
|
3c982cd4d7742600d5785f8620d0b982d0fd741e | sensors/dylos.py | sensors/dylos.py | import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
small, large = [int(x) for x in line.split(b',')]
LOGGER.debug("Read from serial port: %s %s", small, large)
return {"small": small, "large": large}
return read
| import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
LOGGER.debug("Read from serial port: %s", line)
small, large = [int(x.strip()) for x in line.split(b',')]
LOGGER.debug("Small: %s, Large: %s", small, large)
return {"small": small, "large": large}
return read
| Print better logs for Dylos | Print better logs for Dylos
| Python | apache-2.0 | VDL-PRISM/dylos | import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
small, large = [int(x) for x in line.split(b',')]
LOGGER.debug("Read from serial port: %s %s", small, large)
return {"small": small, "large": large}
return read
Print better logs for Dylos | import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
LOGGER.debug("Read from serial port: %s", line)
small, large = [int(x.strip()) for x in line.split(b',')]
LOGGER.debug("Small: %s, Large: %s", small, large)
return {"small": small, "large": large}
return read
| <commit_before>import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
small, large = [int(x) for x in line.split(b',')]
LOGGER.debug("Read from serial port: %s %s", small, large)
return {"small": small, "large": large}
return read
<commit_msg>Print better logs for Dylos<commit_after> | import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
LOGGER.debug("Read from serial port: %s", line)
small, large = [int(x.strip()) for x in line.split(b',')]
LOGGER.debug("Small: %s, Large: %s", small, large)
return {"small": small, "large": large}
return read
| import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
small, large = [int(x) for x in line.split(b',')]
LOGGER.debug("Read from serial port: %s %s", small, large)
return {"small": small, "large": large}
return read
Print better logs for Dylosimport logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
LOGGER.debug("Read from serial port: %s", line)
small, large = [int(x.strip()) for x in line.split(b',')]
LOGGER.debug("Small: %s, Large: %s", small, large)
return {"small": small, "large": large}
return read
| <commit_before>import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
small, large = [int(x) for x in line.split(b',')]
LOGGER.debug("Read from serial port: %s %s", small, large)
return {"small": small, "large": large}
return read
<commit_msg>Print better logs for Dylos<commit_after>import logging
import Adafruit_BBIO.UART as UART
import serial
LOGGER = logging.getLogger(__name__)
def setup(port, baudrate):
# Setup UART
UART.setup("UART1")
ser = serial.Serial(port=port, baudrate=baudrate,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS)
if not ser.isOpen():
ser.open()
def read():
line = ser.readline()
LOGGER.debug("Read from serial port: %s", line)
small, large = [int(x.strip()) for x in line.split(b',')]
LOGGER.debug("Small: %s, Large: %s", small, large)
return {"small": small, "large": large}
return read
|
ed68f3f8961fd9cc212c2bc7700ba758af51d335 | mailchimp_manager/tests/test_list_manager.py | mailchimp_manager/tests/test_list_manager.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
try:
from mailchimp_manager import MailChimpManager
except:
# Local module testing - assuming mailchimp_manager folder put in grandparent folder
import sys, os.path
# Hack for import module in grandparent folder
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)))
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
| Update test script for local testing | Update test script for local testing
| Python | bsd-3-clause | Kudo/mailchimp_manager | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
Update test script for local testing | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
try:
from mailchimp_manager import MailChimpManager
except:
# Local module testing - assuming mailchimp_manager folder put in grandparent folder
import sys, os.path
# Hack for import module in grandparent folder
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)))
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Update test script for local testing<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
try:
from mailchimp_manager import MailChimpManager
except:
# Local module testing - assuming mailchimp_manager folder put in grandparent folder
import sys, os.path
# Hack for import module in grandparent folder
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)))
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
Update test script for local testing#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
try:
from mailchimp_manager import MailChimpManager
except:
# Local module testing - assuming mailchimp_manager folder put in grandparent folder
import sys, os.path
# Hack for import module in grandparent folder
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)))
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Update test script for local testing<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_list_manager.py - Integration test for list management of mailchimp_manager
"""
try:
from mailchimp_manager import MailChimpManager
except:
# Local module testing - assuming mailchimp_manager folder put in grandparent folder
import sys, os.path
# Hack for import module in grandparent folder
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)))
from mailchimp_manager import MailChimpManager
import unittest
TEST_EMAIL = u'john.doe@gmail.com'
class TestMailChimpListManager(unittest.TestCase):
def test_Subscribe_TestEmailInSubscribedList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.subscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInSubscribeList_False(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers()
self.assertNotIn(TEST_EMAIL, emails)
def test_Unsubscribe_TestEmailInUnsubscribeList_True(self):
listMgr = MailChimpManager.ListManager()
listMgr.unsubscribe(TEST_EMAIL)
emails = listMgr.listMembers(MailChimpManager.ListManager.MEMBER_STATUS.UNSUBSCRIBED)
self.assertIn(TEST_EMAIL, emails)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestMailChimpListManager)
unittest.TextTestRunner(verbosity=2).run(suite)
|
959b5fd80a2eeb4ddb56dea07edd16c1aeabc4ff | userprofile/admin.py | userprofile/admin.py | from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
admin.site.register(Profile)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
| from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
class ProfileAdmin(admin.ModelAdmin):
list_filter = (
('tos_accepted', admin.BooleanFieldListFilter),
)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
| Add filtering option to see profiles that have not accepted new tos | Add filtering option to see profiles that have not accepted new tos
| Python | mit | hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website | from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
admin.site.register(Profile)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
Add filtering option to see profiles that have not accepted new tos | from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
class ProfileAdmin(admin.ModelAdmin):
list_filter = (
('tos_accepted', admin.BooleanFieldListFilter),
)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
| <commit_before>from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
admin.site.register(Profile)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
<commit_msg>Add filtering option to see profiles that have not accepted new tos<commit_after> | from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
class ProfileAdmin(admin.ModelAdmin):
list_filter = (
('tos_accepted', admin.BooleanFieldListFilter),
)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
| from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
admin.site.register(Profile)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
Add filtering option to see profiles that have not accepted new tosfrom django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
class ProfileAdmin(admin.ModelAdmin):
list_filter = (
('tos_accepted', admin.BooleanFieldListFilter),
)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
| <commit_before>from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
admin.site.register(Profile)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
<commit_msg>Add filtering option to see profiles that have not accepted new tos<commit_after>from django.contrib import admin
from .models import Profile, Skill, DutyTime, Group
class ProfileAdmin(admin.ModelAdmin):
list_filter = (
('tos_accepted', admin.BooleanFieldListFilter),
)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Skill)
admin.site.register(DutyTime)
admin.site.register(Group)
|
a795274811b3df67a04593b1889d9c93fed40737 | examples/webhooks.py | examples/webhooks.py | from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
| from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data.decode('utf-8')
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
| Fix example for Python 3 compatibility | Fix example for Python 3 compatibility
| Python | mit | stripe/stripe-python | from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
Fix example for Python 3 compatibility | from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data.decode('utf-8')
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
| <commit_before>from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
<commit_msg>Fix example for Python 3 compatibility<commit_after> | from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data.decode('utf-8')
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
| from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
Fix example for Python 3 compatibilityfrom __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data.decode('utf-8')
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
| <commit_before>from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
<commit_msg>Fix example for Python 3 compatibility<commit_after>from __future__ import print_function
import os
import stripe
from flask import Flask, request
stripe.api_key = os.environ.get('STRIPE_SECRET_KEY')
webhook_secret = os.environ.get('WEBHOOK_SECRET')
app = Flask(__name__)
@app.route('/webhooks', methods=['POST'])
def webhooks():
payload = request.data.decode('utf-8')
received_sig = request.headers.get('Stripe-Signature', None)
try:
event = stripe.Webhook.construct_event(
payload, received_sig, webhook_secret)
except ValueError:
print("Error while decoding event!")
return 'Bad payload', 400
except stripe.error.SignatureVerificationError:
print("Invalid signature!")
return 'Bad signature', 400
print("Received event: id={id}, type={type}".format(
id=event.id, type=event.type))
return '', 200
if __name__ == '__main__':
app.run(port=int(os.environ.get('PORT', 5000)))
|
8664741930e5a21bfbdcffe2fc0ca612b4b3e4ea | clburlison_scripts/dropbox_folder_location/dropbox_folder_location.py | clburlison_scripts/dropbox_folder_location/dropbox_folder_location.py | #!/usr/bin/python
"""H/t to eholtam for posting in slack"""
import json
import os
print("Personal: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('personal', {}).get('path', '').replace('', 'None'))
print("Business: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('business', {}).get('path', '').replace('', 'None'))
| #!/usr/bin/python
import json, os, pprint
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
# To list all dropbox data
pprint.pprint(data)
print('')
# Or to find just the paths
for i in ['personal', 'business']:
print('{}:'.format(i.capitalize()))
print(data.get(i, {}).get('path', ''))
| Update dropbox folder location script | Update dropbox folder location script
| Python | mit | clburlison/scripts,clburlison/scripts,clburlison/scripts | #!/usr/bin/python
"""H/t to eholtam for posting in slack"""
import json
import os
print("Personal: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('personal', {}).get('path', '').replace('', 'None'))
print("Business: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('business', {}).get('path', '').replace('', 'None'))
Update dropbox folder location script | #!/usr/bin/python
import json, os, pprint
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
# To list all dropbox data
pprint.pprint(data)
print('')
# Or to find just the paths
for i in ['personal', 'business']:
print('{}:'.format(i.capitalize()))
print(data.get(i, {}).get('path', ''))
| <commit_before>#!/usr/bin/python
"""H/t to eholtam for posting in slack"""
import json
import os
print("Personal: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('personal', {}).get('path', '').replace('', 'None'))
print("Business: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('business', {}).get('path', '').replace('', 'None'))
<commit_msg>Update dropbox folder location script<commit_after> | #!/usr/bin/python
import json, os, pprint
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
# To list all dropbox data
pprint.pprint(data)
print('')
# Or to find just the paths
for i in ['personal', 'business']:
print('{}:'.format(i.capitalize()))
print(data.get(i, {}).get('path', ''))
| #!/usr/bin/python
"""H/t to eholtam for posting in slack"""
import json
import os
print("Personal: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('personal', {}).get('path', '').replace('', 'None'))
print("Business: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('business', {}).get('path', '').replace('', 'None'))
Update dropbox folder location script#!/usr/bin/python
import json, os, pprint
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
# To list all dropbox data
pprint.pprint(data)
print('')
# Or to find just the paths
for i in ['personal', 'business']:
print('{}:'.format(i.capitalize()))
print(data.get(i, {}).get('path', ''))
| <commit_before>#!/usr/bin/python
"""H/t to eholtam for posting in slack"""
import json
import os
print("Personal: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('personal', {}).get('path', '').replace('', 'None'))
print("Business: ")
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
print(data.get('business', {}).get('path', '').replace('', 'None'))
<commit_msg>Update dropbox folder location script<commit_after>#!/usr/bin/python
import json, os, pprint
f = open(os.path.expanduser('~/.dropbox/info.json'), 'r').read()
data = json.loads(f)
# To list all dropbox data
pprint.pprint(data)
print('')
# Or to find just the paths
for i in ['personal', 'business']:
print('{}:'.format(i.capitalize()))
print(data.get(i, {}).get('path', ''))
|
7a5fdf50f4a986336c577ce57ed73da1c445b6cd | db_mutex/models.py | db_mutex/models.py | from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
| from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'db_mutex'
| Declare app_label in model Meta class to work with Django 1.9 | Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
Credit: https://github.com/minervaproject/django-db-mutex/commit/429d701cce7ad2cf8fb77f169c4af6f2f27562fd
| Python | mit | ambitioninc/django-db-mutex,minervaproject/django-db-mutex | from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
Credit: https://github.com/minervaproject/django-db-mutex/commit/429d701cce7ad2cf8fb77f169c4af6f2f27562fd | from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'db_mutex'
| <commit_before>from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
<commit_msg>Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
Credit: https://github.com/minervaproject/django-db-mutex/commit/429d701cce7ad2cf8fb77f169c4af6f2f27562fd<commit_after> | from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'db_mutex'
| from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
Credit: https://github.com/minervaproject/django-db-mutex/commit/429d701cce7ad2cf8fb77f169c4af6f2f27562fdfrom django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'db_mutex'
| <commit_before>from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
<commit_msg>Declare app_label in model Meta class to work with Django 1.9
Fixes RemovedInDjango19Warning:
Model class db_mutex.models.DBMutex doesn't declare an explicit
app_label and either isn't in an application in INSTALLED_APPS or else
was imported before its application was loaded. This will no longer be
supported in Django 1.9.
Credit: https://github.com/minervaproject/django-db-mutex/commit/429d701cce7ad2cf8fb77f169c4af6f2f27562fd<commit_after>from django.db import models
class DBMutex(models.Model):
"""
Models a mutex lock with a ``lock_id`` and a ``creation_time``.
:type lock_id: str
:param lock_id: A unique CharField with a max length of 256
:type creation_time: datetime
:param creation_time: The creation time of the mutex lock
"""
lock_id = models.CharField(max_length=256, unique=True)
creation_time = models.DateTimeField(auto_now_add=True)
class Meta:
app_label = 'db_mutex'
|
a89f2f52170ffbb238d01f58650bcb4e55f3253a | structure.py | structure.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, None), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, kwargs[k]), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class__
| Structure keyword get accepted now | Bugfix: Structure keyword get accepted now
| Python | mit | hastern/jelly | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, None), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__classBugfix: Structure keyword get accepted now | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, kwargs[k]), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class__
| <commit_before>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, None), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class<commit_msg>Bugfix: Structure keyword get accepted now<commit_after> | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, kwargs[k]), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class__
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, None), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__classBugfix: Structure keyword get accepted now#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, kwargs[k]), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class__
| <commit_before>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, None), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class<commit_msg>Bugfix: Structure keyword get accepted now<commit_after>#!/usr/bin/env python
# -*- coding:utf-8 -*-
import logging
# We are assuming, that there is an already configured logger present
logger = logging.getLogger(__name__)
class Structure(object):
"""Simple struct-like object.
members are controlled via the contents of the __slots__ list."""
__slots__ = []
"""Structure members"""
__defaults__ = {}
"""Default values for (a part of) the structure members.
__defaults__.keys() must be a (inproper) subset __slots__."""
def __init__(self, *args, **kwargs):
"""
@param *args: Positional arguments
@param **kwargs: Keyword arguments
"""
# Initialize all members with None
map(lambda k: self.__setattr__(k, self.__defaults__[k]() if k in self.__defaults__ else None), self.__slots__)
# Positional definition of members
for i,a in enumerate(args):
if len(self.__slots__) > i:
self.__setattr__(self.__slots__[i], a)
# Keyword definition of members
map(lambda k: self.__setattr__(k, kwargs[k]), filter(lambda k: k in self.__slots__, kwargs))
@property
def kind(self):
return self.__class__
|
91c3f218bdd5a660568238daa16c217501d39d05 | create_database.py | create_database.py | import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
| from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
| Create database now properly loads all authors and commits into the repository | Create database now properly loads all authors and commits into the repository
| Python | mit | mglidden/git-analysis,mglidden/git-analysis | import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
Create database now properly loads all authors and commits into the repository | from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
| <commit_before>import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
<commit_msg>Create database now properly loads all authors and commits into the repository<commit_after> | from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
| import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
Create database now properly loads all authors and commits into the repositoryfrom author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
| <commit_before>import author
import commit
import config
import os
import pygit2
import sqlalchemy
repo = pygit2.Repository(config.REPO_PATH)
# Probably want to completly reset the DB
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=True)
config.BASE.metadata.create_all(engine)
<commit_msg>Create database now properly loads all authors and commits into the repository<commit_after>from author import Author
from commit import Commit
import config
import os
import pygit2
import sqlalchemy
# If it exists and we want to reset the DB, remove the file
if config.RESET_DB and os.path.exists(config.DB_PATH):
os.remove(config.DB_PATH)
engine = sqlalchemy.create_engine(config.DB_URL, echo=False)
config.BASE.metadata.create_all(engine)
Session = sqlalchemy.orm.sessionmaker(bind=engine)
session = Session()
repo = pygit2.Repository(config.REPO_PATH)
for commit in repo.walk(repo.head.target, pygit2.GIT_SORT_TIME):
author = session.query(Author).filter(Author.email == commit.author.email).first()
if not author:
author = Author(commit.author.name, commit.author.email)
session.add(author)
committer = session.query(Author).filter(Author.email == commit.committer.email).first()
if not committer:
committer = Author(commit.committer.name, commit.committer.email)
session.add(committer)
session.add(Commit(commit.message, commit.commit_time, committer.email, author.email))
session.commit()
|
28681f8b2f88f818c2b5a0197a00df90d3065aaf | models/official/detection/configs/factory.py | models/official/detection/configs/factory.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from configs import shapemask_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
| Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass. | Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass.
PiperOrigin-RevId: 267404830
| Python | apache-2.0 | tensorflow/tpu,tensorflow/tpu,tensorflow/tpu,tensorflow/tpu | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from configs import shapemask_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass.
PiperOrigin-RevId: 267404830 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
| <commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from configs import shapemask_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
<commit_msg>Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass.
PiperOrigin-RevId: 267404830<commit_after> | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from configs import shapemask_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass.
PiperOrigin-RevId: 267404830# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
| <commit_before># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from configs import shapemask_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
<commit_msg>Fix shapemask_config import to handle copy.bara masking that allows cloud detection test to pass.
PiperOrigin-RevId: 267404830<commit_after># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Factory to provide model configs."""
from configs import retinanet_config
from hyperparameters import params_dict
def config_generator(model):
"""Model function generator."""
if model == 'retinanet':
default_config = retinanet_config.RETINANET_CFG
restrictions = retinanet_config.RETINANET_RESTRICTIONS
elif model == 'shapemask':
default_config = shapemask_config.SHAPEMASK_CFG
restrictions = shapemask_config.SHAPEMASK_RESTRICTIONS
else:
raise ValueError('Model %s is not supported.' % model)
return params_dict.ParamsDict(default_config, restrictions)
|
a51c8238ba61d213d089767ba38f18f29dacb08f | dakis/api/views.py | dakis/api/views.py | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| Add exp and task ids to API | Add exp and task ids to API
| Python | agpl-3.0 | niekas/dakis,niekas/dakis,niekas/dakis | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
Add exp and task ids to API | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
<commit_msg>Add exp and task ids to API<commit_after> | from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
Add exp and task ids to APIfrom rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
| <commit_before>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
<commit_msg>Add exp and task ids to API<commit_after>from rest_framework import serializers, viewsets
from rest_framework import filters
from django.contrib.auth.models import User
from dakis.core.models import Experiment, Task
class ExperimentSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Experiment
exclude = ('author',)
def create(self, data):
user = self.context['request'].user
if user.is_authenticated():
data['author'] = user
return super(ExperimentSerializer, self).create(data)
class TaskSerializer(serializers.HyperlinkedModelSerializer):
id = serializers.IntegerField(label='ID', read_only=True)
class Meta:
model = Task
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('first_name', 'last_name', 'username', 'email')
class ExperimentViewSet(viewsets.ModelViewSet):
queryset = Experiment.objects.all()
serializer_class = ExperimentSerializer
class TaskViewSet(viewsets.ModelViewSet):
queryset = Task.objects.all()
serializer_class = TaskSerializer
filter_fields = ('experiment', 'func_cls', 'func_id', 'status')
filter_backends = (filters.DjangoFilterBackend,)
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
|
fedd90e80a6c56ab406e52b9b0ece14b324fa5d5 | aldryn_apphooks_config/fields.py | aldryn_apphooks_config/fields.py | # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
class AppHookConfigFormField(forms.ModelChoiceField):
widget = AppHookConfigWidget
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
| # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigFormField(forms.ModelChoiceField):
def __init__(self, queryset, empty_label="---------", required=True,
widget=AppHookConfigWidget, *args, **kwargs):
super(AppHookConfigFormField, self).__init__(queryset, empty_label,
required, widget, *args, **kwargs)
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
| Improve the ability for developers to extend or modify | Improve the ability for developers to extend or modify
| Python | bsd-3-clause | aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config | # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
class AppHookConfigFormField(forms.ModelChoiceField):
widget = AppHookConfigWidget
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
Improve the ability for developers to extend or modify | # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigFormField(forms.ModelChoiceField):
def __init__(self, queryset, empty_label="---------", required=True,
widget=AppHookConfigWidget, *args, **kwargs):
super(AppHookConfigFormField, self).__init__(queryset, empty_label,
required, widget, *args, **kwargs)
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
| <commit_before># -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
class AppHookConfigFormField(forms.ModelChoiceField):
widget = AppHookConfigWidget
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
<commit_msg>Improve the ability for developers to extend or modify<commit_after> | # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigFormField(forms.ModelChoiceField):
def __init__(self, queryset, empty_label="---------", required=True,
widget=AppHookConfigWidget, *args, **kwargs):
super(AppHookConfigFormField, self).__init__(queryset, empty_label,
required, widget, *args, **kwargs)
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
| # -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
class AppHookConfigFormField(forms.ModelChoiceField):
widget = AppHookConfigWidget
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
Improve the ability for developers to extend or modify# -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigFormField(forms.ModelChoiceField):
def __init__(self, queryset, empty_label="---------", required=True,
widget=AppHookConfigWidget, *args, **kwargs):
super(AppHookConfigFormField, self).__init__(queryset, empty_label,
required, widget, *args, **kwargs)
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
| <commit_before># -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
class AppHookConfigFormField(forms.ModelChoiceField):
widget = AppHookConfigWidget
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
<commit_msg>Improve the ability for developers to extend or modify<commit_after># -*- coding: utf-8 -*-
from django import forms
from django.db import models
from django.utils.translation import ugettext_lazy as _
from .widgets import AppHookConfigWidget
class AppHookConfigFormField(forms.ModelChoiceField):
def __init__(self, queryset, empty_label="---------", required=True,
widget=AppHookConfigWidget, *args, **kwargs):
super(AppHookConfigFormField, self).__init__(queryset, empty_label,
required, widget, *args, **kwargs)
class AppHookConfigField(models.ForeignKey):
def __init__(self, *args, **kwargs):
kwargs.update({'help_text': _(u'When selecting a value, the form is reloaded to get the updated default')})
super(AppHookConfigField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
kwargs.update({'form_class': AppHookConfigFormField})
return super(AppHookConfigField, self).formfield(**kwargs)
|
16002b001a120410e4f993ad6fb93b123de183cb | astrodynamics/tests/test_util.py | astrodynamics/tests/test_util.py | # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
| # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
assert verify_unit(0, '') == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
with pytest.raises(ValueError):
verify_unit(0, 'm')
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
assert verify_unit(1 * u.meter, 'm') == 1 * u.meter
| Test string form of verify_unit | Test string form of verify_unit
| Python | mit | python-astrodynamics/astrodynamics,python-astrodynamics/astrodynamics | # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
Test string form of verify_unit | # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
assert verify_unit(0, '') == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
with pytest.raises(ValueError):
verify_unit(0, 'm')
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
assert verify_unit(1 * u.meter, 'm') == 1 * u.meter
| <commit_before># coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
<commit_msg>Test string form of verify_unit<commit_after> | # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
assert verify_unit(0, '') == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
with pytest.raises(ValueError):
verify_unit(0, 'm')
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
assert verify_unit(1 * u.meter, 'm') == 1 * u.meter
| # coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
Test string form of verify_unit# coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
assert verify_unit(0, '') == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
with pytest.raises(ValueError):
verify_unit(0, 'm')
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
assert verify_unit(1 * u.meter, 'm') == 1 * u.meter
| <commit_before># coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
<commit_msg>Test string form of verify_unit<commit_after># coding: utf-8
from __future__ import absolute_import, division, print_function
import pytest
from astropy import units as u
from astrodynamics.util import verify_unit
def test_verify_unit():
# Implicit dimensionless values are allowed, test that Quantity is returned.
assert verify_unit(0, u.one) == 0 * u.one
assert verify_unit(0, '') == 0 * u.one
# Test failure mode
with pytest.raises(ValueError):
verify_unit(0, u.meter)
with pytest.raises(ValueError):
verify_unit(0, 'm')
# Quantity should be passed back if unit matches
assert verify_unit(1 * u.meter, u.meter) == 1 * u.meter
assert verify_unit(1 * u.meter, 'm') == 1 * u.meter
|
631bfc08a31477a81103cb83329ce4b29d977658 | openedx/core/djangoapps/content/course_overviews/migrations/0009_readd_facebook_url.py | openedx/core/djangoapps/content/course_overviews/migrations/0009_readd_facebook_url.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, OperationalError, connection
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field
# We need to handle the case where our noop 0008 ran AND the case
# where the original 0008 ran. We do that by using Django's introspection
# API to query INFORMATION_SCHEMA. _meta is unavailable as the
# column has already been removed from the model.
fields = connection.introspection.get_table_description(connection.cursor(),'course_overviews_courseoverview')
operations = []
if not any(f.name == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, connection
def table_description():
"""Handle Mysql/Pg vs Sqlite"""
# django's mysql/pg introspection.get_table_description tries to select *
# from table and fails during initial migrations from scratch.
# sqlite does not have this failure, so we can use the API.
# For not-sqlite, query information-schema directly with code lifted
# from the internals of django.db.backends.mysql.introspection.py
if connection.vendor == 'sqlite':
fields = connection.introspection.get_table_description(connection.cursor(), 'course_overviews_courseoverview')
return [f.name for f in fields]
else:
cursor = connection.cursor()
cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'course_overviews_courseoverview' AND table_schema = DATABASE()""")
rows = cursor.fetchall()
return [r[0] for r in rows]
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field We need to
# handle the case where our noop 0008 ran AND the case where the original
# 0008 ran. We do that by using the standard information_schema to find out
# what columns exist. _meta is unavailable as the column has already been
# removed from the model
operations = []
fields = table_description()
# during a migration from scratch, fields will be empty, but we do not want to add
# an additional facebook_url
if fields and not any(f == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
| Migrate correctly from scratch also | Migrate correctly from scratch also
Unfortunately, instrospection.get_table_description runs
select * from course_overview_courseoverview, which of course
does not exist while django is calculating initial migrations, causing
this to fail. Additionally, sqlite does not support information_schema,
but does not do a select * from the table.
Lift the main part of mysql's get_table_description up to the migration itself
and just inspect it directly. Continue to call the API for sqlite.
| Python | agpl-3.0 | JioEducation/edx-platform,Lektorium-LLC/edx-platform,cecep-edu/edx-platform,jzoldak/edx-platform,eduNEXT/edunext-platform,chrisndodge/edx-platform,gsehub/edx-platform,shabab12/edx-platform,arbrandes/edx-platform,CredoReference/edx-platform,fintech-circle/edx-platform,miptliot/edx-platform,amir-qayyum-khan/edx-platform,fintech-circle/edx-platform,pepeportela/edx-platform,eduNEXT/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,fintech-circle/edx-platform,pabloborrego93/edx-platform,eduNEXT/edx-platform,CourseTalk/edx-platform,CredoReference/edx-platform,naresh21/synergetics-edx-platform,gymnasium/edx-platform,marcore/edx-platform,tanmaykm/edx-platform,ahmedaljazzar/edx-platform,analyseuc3m/ANALYSE-v1,jjmiranda/edx-platform,louyihua/edx-platform,synergeticsedx/deployment-wipro,romain-li/edx-platform,cpennington/edx-platform,itsjeyd/edx-platform,proversity-org/edx-platform,gymnasium/edx-platform,devs1991/test_edx_docmode,JioEducation/edx-platform,pabloborrego93/edx-platform,raccoongang/edx-platform,longmen21/edx-platform,angelapper/edx-platform,longmen21/edx-platform,CourseTalk/edx-platform,jjmiranda/edx-platform,hastexo/edx-platform,ahmedaljazzar/edx-platform,miptliot/edx-platform,Edraak/edraak-platform,cecep-edu/edx-platform,teltek/edx-platform,CourseTalk/edx-platform,solashirai/edx-platform,caesar2164/edx-platform,procangroup/edx-platform,waheedahmed/edx-platform,solashirai/edx-platform,EDUlib/edx-platform,EDUlib/edx-platform,louyihua/edx-platform,caesar2164/edx-platform,chrisndodge/edx-platform,devs1991/test_edx_docmode,appsembler/edx-platform,kmoocdev2/edx-platform,procangroup/edx-platform,gymnasium/edx-platform,prarthitm/edxplatform,pepeportela/edx-platform,kmoocdev2/edx-platform,caesar2164/edx-platform,arbrandes/edx-platform,deepsrijit1105/edx-platform,msegado/edx-platform,devs1991/test_edx_docmode,msegado/edx-platform,raccoongang/edx-platform,analyseuc3m/ANALYSE-v1,mbareta/edx-platform-ft,cecep-edu/edx-platform,prarthitm/ed
xplatform,eduNEXT/edunext-platform,pepeportela/edx-platform,UOMx/edx-platform,marcore/edx-platform,jolyonb/edx-platform,pabloborrego93/edx-platform,JioEducation/edx-platform,longmen21/edx-platform,lduarte1991/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,tanmaykm/edx-platform,edx/edx-platform,kmoocdev2/edx-platform,Livit/Livit.Learn.EdX,Stanford-Online/edx-platform,tanmaykm/edx-platform,philanthropy-u/edx-platform,devs1991/test_edx_docmode,UOMx/edx-platform,hastexo/edx-platform,JioEducation/edx-platform,louyihua/edx-platform,prarthitm/edxplatform,angelapper/edx-platform,solashirai/edx-platform,ampax/edx-platform,cpennington/edx-platform,10clouds/edx-platform,edx-solutions/edx-platform,romain-li/edx-platform,romain-li/edx-platform,synergeticsedx/deployment-wipro,deepsrijit1105/edx-platform,pepeportela/edx-platform,solashirai/edx-platform,proversity-org/edx-platform,msegado/edx-platform,hastexo/edx-platform,proversity-org/edx-platform,waheedahmed/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,kmoocdev2/edx-platform,ampax/edx-platform,romain-li/edx-platform,fintech-circle/edx-platform,10clouds/edx-platform,EDUlib/edx-platform,proversity-org/edx-platform,chrisndodge/edx-platform,itsjeyd/edx-platform,Edraak/edraak-platform,UOMx/edx-platform,Lektorium-LLC/edx-platform,Livit/Livit.Learn.EdX,mbareta/edx-platform-ft,philanthropy-u/edx-platform,itsjeyd/edx-platform,mbareta/edx-platform-ft,prarthitm/edxplatform,ESOedX/edx-platform,edx/edx-platform,angelapper/edx-platform,louyihua/edx-platform,UOMx/edx-platform,naresh21/synergetics-edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,TeachAtTUM/edx-platform,gsehub/edx-platform,longmen21/edx-platform,a-parhom/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,Stanford-Online/edx-platform,ahmedaljazzar/edx-platform,itsjeyd/edx-platform,shabab12/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,EDUlib/edx-platform,raccoongang/edx-platform,BehavioralInsightsTeam/edx-platform,stvstnfrd/ed
x-platform,pabloborrego93/edx-platform,procangroup/edx-platform,eduNEXT/edunext-platform,devs1991/test_edx_docmode,Lektorium-LLC/edx-platform,gymnasium/edx-platform,waheedahmed/edx-platform,devs1991/test_edx_docmode,shabab12/edx-platform,ESOedX/edx-platform,CredoReference/edx-platform,Livit/Livit.Learn.EdX,waheedahmed/edx-platform,jolyonb/edx-platform,Edraak/edraak-platform,ESOedX/edx-platform,amir-qayyum-khan/edx-platform,a-parhom/edx-platform,stvstnfrd/edx-platform,mbareta/edx-platform-ft,stvstnfrd/edx-platform,10clouds/edx-platform,gsehub/edx-platform,jzoldak/edx-platform,chrisndodge/edx-platform,miptliot/edx-platform,analyseuc3m/ANALYSE-v1,jolyonb/edx-platform,jolyonb/edx-platform,ahmedaljazzar/edx-platform,cpennington/edx-platform,raccoongang/edx-platform,teltek/edx-platform,arbrandes/edx-platform,longmen21/edx-platform,CredoReference/edx-platform,naresh21/synergetics-edx-platform,procangroup/edx-platform,mitocw/edx-platform,philanthropy-u/edx-platform,eduNEXT/edx-platform,synergeticsedx/deployment-wipro,TeachAtTUM/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,amir-qayyum-khan/edx-platform,cecep-edu/edx-platform,solashirai/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,lduarte1991/edx-platform,appsembler/edx-platform,ampax/edx-platform,jzoldak/edx-platform,analyseuc3m/ANALYSE-v1,Edraak/edraak-platform,synergeticsedx/deployment-wipro,romain-li/edx-platform,hastexo/edx-platform,cpennington/edx-platform,lduarte1991/edx-platform,caesar2164/edx-platform,teltek/edx-platform,teltek/edx-platform,ampax/edx-platform,mitocw/edx-platform,shabab12/edx-platform,devs1991/test_edx_docmode,msegado/edx-platform,Lektorium-LLC/edx-platform,10clouds/edx-platform,jjmiranda/edx-platform,CourseTalk/edx-platform,appsembler/edx-platform,marcore/edx-platform,edx/edx-platform,tanmaykm/edx-platform,mitocw/edx-platform,appsembler/edx-platform,deepsrijit1105/edx-platform,angelapper/edx-platform,jjmiranda/edx-platform,jzoldak/edx-platform,Behaviora
lInsightsTeam/edx-platform,deepsrijit1105/edx-platform,cecep-edu/edx-platform,Stanford-Online/edx-platform,miptliot/edx-platform,amir-qayyum-khan/edx-platform,waheedahmed/edx-platform,a-parhom/edx-platform,lduarte1991/edx-platform,edx-solutions/edx-platform,TeachAtTUM/edx-platform,edx-solutions/edx-platform,Livit/Livit.Learn.EdX,philanthropy-u/edx-platform,marcore/edx-platform,BehavioralInsightsTeam/edx-platform,mitocw/edx-platform | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, OperationalError, connection
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field
# We need to handle the case where our noop 0008 ran AND the case
# where the original 0008 ran. We do that by using Django's introspection
# API to query INFORMATION_SCHEMA. _meta is unavailable as the
# column has already been removed from the model.
fields = connection.introspection.get_table_description(connection.cursor(),'course_overviews_courseoverview')
operations = []
if not any(f.name == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
Migrate correctly from scratch also
Unfortunately, instrospection.get_table_description runs
select * from course_overview_courseoverview, which of course
does not exist while django is calculating initial migrations, causing
this to fail. Additionally, sqlite does not support information_schema,
but does not do a select * from the table.
Lift the main part of mysql's get_table_description up to the migration itself
and just inspect it directly. Continue to call the API for sqlite. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, connection
def table_description():
"""Handle Mysql/Pg vs Sqlite"""
# django's mysql/pg introspection.get_table_description tries to select *
# from table and fails during initial migrations from scratch.
# sqlite does not have this failure, so we can use the API.
# For not-sqlite, query information-schema directly with code lifted
# from the internals of django.db.backends.mysql.introspection.py
if connection.vendor == 'sqlite':
fields = connection.introspection.get_table_description(connection.cursor(), 'course_overviews_courseoverview')
return [f.name for f in fields]
else:
cursor = connection.cursor()
cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'course_overviews_courseoverview' AND table_schema = DATABASE()""")
rows = cursor.fetchall()
return [r[0] for r in rows]
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field We need to
# handle the case where our noop 0008 ran AND the case where the original
# 0008 ran. We do that by using the standard information_schema to find out
# what columns exist. _meta is unavailable as the column has already been
# removed from the model
operations = []
fields = table_description()
# during a migration from scratch, fields will be empty, but we do not want to add
# an additional facebook_url
if fields and not any(f == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, OperationalError, connection
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field
# We need to handle the case where our noop 0008 ran AND the case
# where the original 0008 ran. We do that by using Django's introspection
# API to query INFORMATION_SCHEMA. _meta is unavailable as the
# column has already been removed from the model.
fields = connection.introspection.get_table_description(connection.cursor(),'course_overviews_courseoverview')
operations = []
if not any(f.name == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
<commit_msg>Migrate correctly from scratch also
Unfortunately, instrospection.get_table_description runs
select * from course_overview_courseoverview, which of course
does not exist while django is calculating initial migrations, causing
this to fail. Additionally, sqlite does not support information_schema,
but does not do a select * from the table.
Lift the main part of mysql's get_table_description up to the migration itself
and just inspect it directly. Continue to call the API for sqlite.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, connection
def table_description():
"""Handle Mysql/Pg vs Sqlite"""
# django's mysql/pg introspection.get_table_description tries to select *
# from table and fails during initial migrations from scratch.
# sqlite does not have this failure, so we can use the API.
# For not-sqlite, query information-schema directly with code lifted
# from the internals of django.db.backends.mysql.introspection.py
if connection.vendor == 'sqlite':
fields = connection.introspection.get_table_description(connection.cursor(), 'course_overviews_courseoverview')
return [f.name for f in fields]
else:
cursor = connection.cursor()
cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'course_overviews_courseoverview' AND table_schema = DATABASE()""")
rows = cursor.fetchall()
return [r[0] for r in rows]
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field We need to
# handle the case where our noop 0008 ran AND the case where the original
# 0008 ran. We do that by using the standard information_schema to find out
# what columns exist. _meta is unavailable as the column has already been
# removed from the model
operations = []
fields = table_description()
# during a migration from scratch, fields will be empty, but we do not want to add
# an additional facebook_url
if fields and not any(f == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, OperationalError, connection
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field
# We need to handle the case where our noop 0008 ran AND the case
# where the original 0008 ran. We do that by using Django's introspection
# API to query INFORMATION_SCHEMA. _meta is unavailable as the
# column has already been removed from the model.
fields = connection.introspection.get_table_description(connection.cursor(),'course_overviews_courseoverview')
operations = []
if not any(f.name == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
Migrate correctly from scratch also
Unfortunately, instrospection.get_table_description runs
select * from course_overview_courseoverview, which of course
does not exist while django is calculating initial migrations, causing
this to fail. Additionally, sqlite does not support information_schema,
but does not do a select * from the table.
Lift the main part of mysql's get_table_description up to the migration itself
and just inspect it directly. Continue to call the API for sqlite.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, connection
def table_description():
"""Handle Mysql/Pg vs Sqlite"""
# django's mysql/pg introspection.get_table_description tries to select *
# from table and fails during initial migrations from scratch.
# sqlite does not have this failure, so we can use the API.
# For not-sqlite, query information-schema directly with code lifted
# from the internals of django.db.backends.mysql.introspection.py
if connection.vendor == 'sqlite':
fields = connection.introspection.get_table_description(connection.cursor(), 'course_overviews_courseoverview')
return [f.name for f in fields]
else:
cursor = connection.cursor()
cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'course_overviews_courseoverview' AND table_schema = DATABASE()""")
rows = cursor.fetchall()
return [r[0] for r in rows]
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field We need to
# handle the case where our noop 0008 ran AND the case where the original
# 0008 ran. We do that by using the standard information_schema to find out
# what columns exist. _meta is unavailable as the column has already been
# removed from the model
operations = []
fields = table_description()
# during a migration from scratch, fields will be empty, but we do not want to add
# an additional facebook_url
if fields and not any(f == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, OperationalError, connection
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field
# We need to handle the case where our noop 0008 ran AND the case
# where the original 0008 ran. We do that by using Django's introspection
# API to query INFORMATION_SCHEMA. _meta is unavailable as the
# column has already been removed from the model.
fields = connection.introspection.get_table_description(connection.cursor(),'course_overviews_courseoverview')
operations = []
if not any(f.name == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
<commit_msg>Migrate correctly from scratch also
Unfortunately, instrospection.get_table_description runs
select * from course_overview_courseoverview, which of course
does not exist while django is calculating initial migrations, causing
this to fail. Additionally, sqlite does not support information_schema,
but does not do a select * from the table.
Lift the main part of mysql's get_table_description up to the migration itself
and just inspect it directly. Continue to call the API for sqlite.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models, connection
def table_description():
"""Handle Mysql/Pg vs Sqlite"""
# django's mysql/pg introspection.get_table_description tries to select *
# from table and fails during initial migrations from scratch.
# sqlite does not have this failure, so we can use the API.
# For not-sqlite, query information-schema directly with code lifted
# from the internals of django.db.backends.mysql.introspection.py
if connection.vendor == 'sqlite':
fields = connection.introspection.get_table_description(connection.cursor(), 'course_overviews_courseoverview')
return [f.name for f in fields]
else:
cursor = connection.cursor()
cursor.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'course_overviews_courseoverview' AND table_schema = DATABASE()""")
rows = cursor.fetchall()
return [r[0] for r in rows]
class Migration(migrations.Migration):
dependencies = [
('course_overviews', '0008_remove_courseoverview_facebook_url'),
]
# An original version of 0008 removed the facebook_url field We need to
# handle the case where our noop 0008 ran AND the case where the original
# 0008 ran. We do that by using the standard information_schema to find out
# what columns exist. _meta is unavailable as the column has already been
# removed from the model
operations = []
fields = table_description()
# during a migration from scratch, fields will be empty, but we do not want to add
# an additional facebook_url
if fields and not any(f == 'facebook_url' for f in fields):
operations += migrations.AddField(
model_name='courseoverview',
name='facebook_url',
field=models.TextField(null=True),
),
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.