commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ad78de49c8ed0f8c766d3098ceaa07dd60ddc865
|
constants.py
|
constants.py
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNERS = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNER = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
Change name of constant to match code.
|
Change name of constant to match code.
Fix #11.
|
Python
|
mit
|
lifted-studios/AutoCopyright,lifted-studios/AutoCopyright
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNERS = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
Change name of constant to match code.
Fix #11.
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNER = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
<commit_before>#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNERS = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
<commit_msg>Change name of constant to match code.
Fix #11.<commit_after>
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNER = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNERS = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
Change name of constant to match code.
Fix #11.#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNER = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
<commit_before>#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNERS = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
<commit_msg>Change name of constant to match code.
Fix #11.<commit_after>#
# Copyright (c) 2012 by Lifted Studios. All Rights Reserved.
#
ERROR_MISSING_OWNER = 'Default copyright owner not set. Please edit the settings file to correct this.'
LINE_ENDING_UNIX = 'Unix'
LINE_ENDING_WINDOWS = 'Windows'
PLUGIN_NAME = 'AutoCopyright'
SETTING_COPYRIGHT_MESSAGE = 'copyright message'
SETTING_OWNERS = 'owner'
SETTINGS_FILE = PLUGIN_NAME + '.sublime-settings'
SETTINGS_PATH_USER = 'User'
|
d2ce7b64c14e18ca395a2d1dc03123ae8a5735b7
|
ab_game.py
|
ab_game.py
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
if state.get_move_number() == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create a AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
mn = state.get_move_number()
if mn == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
if depth > 4:
min_priority = 4
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create an AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
Enable min_priority again - seems to be working?
|
Enable min_priority again - seems to be working?
|
Python
|
mit
|
cropleyb/pentai,cropleyb/pentai,cropleyb/pentai
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
if state.get_move_number() == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create a AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
Enable min_priority again - seems to be working?
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
mn = state.get_move_number()
if mn == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
if depth > 4:
min_priority = 4
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create an AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
<commit_before>#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
if state.get_move_number() == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create a AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
<commit_msg>Enable min_priority again - seems to be working?<commit_after>
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
mn = state.get_move_number()
if mn == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
if depth > 4:
min_priority = 4
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create an AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
if state.get_move_number() == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create a AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
Enable min_priority again - seems to be working?#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
mn = state.get_move_number()
if mn == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
if depth > 4:
min_priority = 4
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create an AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
<commit_before>#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
if state.get_move_number() == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create a AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
<commit_msg>Enable min_priority again - seems to be working?<commit_after>#!/usr/bin/python
import board
import pente_exceptions
from ab_state import *
CAPTURE_SCORE_BASE = 120 ** 3
class ABGame():
""" This class acts as a bridge between the AlphaBeta code and my code """
def __init__(self, base_game):
s = self.current_state = ABState()
s.set_state(base_game.current_state)
self.base_game = base_game
def to_move(self, state=None):
if state is None:
state = self.current_state
return state.to_move()
def utility(self, state):
return state.utility()
def successors(self, state, depth):
mn = state.get_move_number()
if mn == 1:
# The first black move is always in the centre
brd_size = self.base_game.get_board().get_size()
centre_pos = (brd_size/2, brd_size/2)
p_i = [centre_pos]
else:
min_priority = 0
if depth > 4:
min_priority = 4
pos_iter = state.get_iter(state.to_move())
p_i = pos_iter.get_iter(state.to_move_colour(), min_priority)
for pos in p_i:
# create an AB_State for each possible move from state
succ = state.create_state(pos)
yield pos, succ
def terminal_test(self, state):
return state.terminal()
|
76600b63940da9322673ce6cd436129a7d65f10d
|
scripts/ec2/terminate_all.py
|
scripts/ec2/terminate_all.py
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
Add import statement for os
|
Add import statement for os
|
Python
|
bsd-2-clause
|
manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill,manpen/thrill
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
Add import statement for os
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
<commit_before>#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
<commit_msg>Add import statement for os<commit_after>
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
Add import statement for os#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
<commit_before>#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
<commit_msg>Add import statement for os<commit_after>#!/usr/bin/env python
##########################################################################
# scripts/ec2/terminate_all.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import boto3
import os
from subprocess import call
ec2 = boto3.resource('ec2')
filters = [{'Name': 'instance-state-name', 'Values': ['running']}]
if "EC2_KEY_NAME" in os.environ:
filters.append({'Name': 'key-name', 'Values': [os.environ['EC2_KEY_NAME']]})
instances = ec2.instances.filter(Filters=filters)
ids = [instance.id for instance in instances]
print("Terminating:", ids)
ec2.instances.filter(InstanceIds=ids).terminate()
##########################################################################
|
eaa1eb7050a917320091e45d6deed6f6146373d8
|
dash_core_components/__init__.py
|
dash_core_components/__init__.py
|
import os as _os
import dash as _dash
import sys as _sys
from version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
import os as _os
import dash as _dash
import sys as _sys
from .version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
Extend import statement to support Python 3
|
Extend import statement to support Python 3
|
Python
|
mit
|
plotly/dash-core-components
|
import os as _os
import dash as _dash
import sys as _sys
from version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
Extend import statement to support Python 3
|
import os as _os
import dash as _dash
import sys as _sys
from .version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
<commit_before>import os as _os
import dash as _dash
import sys as _sys
from version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
<commit_msg>Extend import statement to support Python 3<commit_after>
|
import os as _os
import dash as _dash
import sys as _sys
from .version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
import os as _os
import dash as _dash
import sys as _sys
from version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
Extend import statement to support Python 3import os as _os
import dash as _dash
import sys as _sys
from .version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
<commit_before>import os as _os
import dash as _dash
import sys as _sys
from version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
<commit_msg>Extend import statement to support Python 3<commit_after>import os as _os
import dash as _dash
import sys as _sys
from .version import __version__
_current_path = _os.path.dirname(_os.path.abspath(__file__))
_components = _dash.development.component_loader.load_components(
_os.path.join(_current_path, 'metadata.json'),
'dash_core_components'
)
_this_module = _sys.modules[__name__]
_js_dist = [
{
"relative_package_path": "bundle.js",
"external_url": (
"https://unpkg.com/dash-core-components@{}"
"/dash_core_components/bundle.js"
).format(__version__)
}
]
_css_dist = [
{
"relative_package_path": [
"react-select@1.0.0-rc.3.min.css",
"rc-slider@6.1.2.css"
],
"external_url": [
"https://unpkg.com/react-select@1.0.0-rc.3/dist/react-select.min.css",
"https://unpkg.com/rc-slider@6.1.2/assets/index.css"
]
}
]
for component in _components:
setattr(_this_module, component.__name__, component)
setattr(component, '_js_dist', _js_dist)
setattr(component, '_css_dist', _css_dist)
|
2d64c01daebd918c3e6196b1eb3ad62f105c56e0
|
django_google_charts/charts.py
|
django_google_charts/charts.py
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
Make this Python 2.x compatible
|
Make this Python 2.x compatible
|
Python
|
mit
|
danpalmer/django-google-charts,danpalmer/django-google-charts
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)Make this Python 2.x compatible
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
<commit_before>import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)<commit_msg>Make this Python 2.x compatible<commit_after>
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)Make this Python 2.x compatibleimport six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
<commit_before>import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)<commit_msg>Make this Python 2.x compatible<commit_after>import six
import json
from django.core.urlresolvers import reverse
from django.utils.html import format_html, mark_safe
from django.utils.encoding import python_2_unicode_compatible
CHARTS = {}
class ChartMeta(type):
def __new__(cls, name, bases, attrs):
klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)
if klass.chart_slug:
CHARTS[klass.chart_slug] = klass
return klass
@six.add_metaclass(ChartMeta)
@python_2_unicode_compatible
class Chart(object):
options = {}
chart_slug = None
columns = None
def get_data(self):
raise NotImplementedError
def __str__(self):
return format_html(
"<div "
"data-chart-options='{0}'"
"data-chart-url='{1}'"
"></div>",
json.dumps(self.options),
reverse(
'djgc-chart-data',
args=(self.chart_slug,),
),
)
|
947f8d3855ef5a71bbb8726aa73d0694cb8a3416
|
dm_control/suite/common/__init__.py
|
dm_control/suite/common/__init__.py
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"common/materials.xml",
"common/skybox.xml",
"common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"./common/materials.xml",
"./common/skybox.xml",
"./common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
Prepend assets dict keys with './' to match filenames in XML
|
Prepend assets dict keys with './' to match filenames in XML
PiperOrigin-RevId: 189602660
|
Python
|
apache-2.0
|
deepmind/dm_control
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"common/materials.xml",
"common/skybox.xml",
"common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
Prepend assets dict keys with './' to match filenames in XML
PiperOrigin-RevId: 189602660
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"./common/materials.xml",
"./common/skybox.xml",
"./common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
<commit_before># Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"common/materials.xml",
"common/skybox.xml",
"common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
<commit_msg>Prepend assets dict keys with './' to match filenames in XML
PiperOrigin-RevId: 189602660<commit_after>
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"./common/materials.xml",
"./common/skybox.xml",
"./common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"common/materials.xml",
"common/skybox.xml",
"common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
Prepend assets dict keys with './' to match filenames in XML
PiperOrigin-RevId: 189602660# Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"./common/materials.xml",
"./common/skybox.xml",
"./common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
<commit_before># Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"common/materials.xml",
"common/skybox.xml",
"common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
<commit_msg>Prepend assets dict keys with './' to match filenames in XML
PiperOrigin-RevId: 189602660<commit_after># Copyright 2017 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Functions to manage the common assets for domains."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from dm_control.utils import io as resources
_SUITE_DIR = os.path.dirname(os.path.dirname(__file__))
_FILENAMES = [
"./common/materials.xml",
"./common/skybox.xml",
"./common/visual.xml",
]
ASSETS = {filename: resources.GetResource(os.path.join(_SUITE_DIR, filename))
for filename in _FILENAMES}
def read_model(model_filename):
"""Reads a model XML file and returns its contents as a string."""
return resources.GetResource(os.path.join(_SUITE_DIR, model_filename))
|
0bcecfdf33f42f85bb9a8e32e79686a41fb5226a
|
django_validator/exceptions.py
|
django_validator/exceptions.py
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
Fix Validation import error in older DRF.
|
Fix Validation import error in older DRF.
|
Python
|
mit
|
romain-li/django-validator,romain-li/django-validator
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
Fix Validation import error in older DRF.
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
<commit_before>from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
<commit_msg>Fix Validation import error in older DRF.<commit_after>
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
Fix Validation import error in older DRF.from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
<commit_before>from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
<commit_msg>Fix Validation import error in older DRF.<commit_after>from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
3cdca61da41fcd2480edcdfa35c47c5b13070ab5
|
tests/test_misc.py
|
tests/test_misc.py
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(200 <= len(charts) <= 400)
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(100 <= len(charts) <= 400)
|
Fix charts() test (now there are only 174 charts)
|
Fix charts() test (now there are only 174 charts)
|
Python
|
mit
|
guoguo12/billboard-charts,guoguo12/billboard-charts
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(200 <= len(charts) <= 400)
Fix charts() test (now there are only 174 charts)
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(100 <= len(charts) <= 400)
|
<commit_before># -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(200 <= len(charts) <= 400)
<commit_msg>Fix charts() test (now there are only 174 charts)<commit_after>
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(100 <= len(charts) <= 400)
|
# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(200 <= len(charts) <= 400)
Fix charts() test (now there are only 174 charts)# -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(100 <= len(charts) <= 400)
|
<commit_before># -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(200 <= len(charts) <= 400)
<commit_msg>Fix charts() test (now there are only 174 charts)<commit_after># -*- coding: utf-8 -*-
import billboard
import unittest
from nose.tools import raises
from requests.exceptions import ConnectionError
import six
class MiscTest(unittest.TestCase):
@raises(ConnectionError)
def testTimeout(self):
"""Checks that using a very small timeout prevents connection."""
billboard.ChartData("hot-100", timeout=1e-9)
@raises(billboard.BillboardNotFoundException)
def testNonExistentChart(self):
"""Checks that requesting a non-existent chart fails."""
billboard.ChartData("does-not-exist")
def testUnicode(self):
"""Checks that the Billboard website does not use Unicode characters."""
chart = billboard.ChartData("hot-100", date="2018-01-27")
self.assertEqual(
chart[97].title, six.text_type("El Bano")
) # With Unicode this should be "El Baño"
def testDifficultTitleCasing(self):
"""Checks that a difficult chart title receives proper casing."""
chart = billboard.ChartData("greatest-r-b-hip-hop-songs")
self.assertEqual(chart.title, "Greatest of All Time Hot R&B/Hip-Hop Songs")
def testCharts(self):
"""Checks that the function for listing all charts returns reasonable
results."""
charts = billboard.charts()
self.assertTrue("hot-100" in charts)
self.assertTrue(100 <= len(charts) <= 400)
|
e484ea554011c032c8152dc5aed65cdceaa1ba01
|
dm_env/_metadata.py
|
dm_env/_metadata.py
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0a0' # https://www.python.org/dev/peps/pep-0440/
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
Update semantic version to 1.0
|
Update semantic version to 1.0
PiperOrigin-RevId: 258839428
Change-Id: Idac6ba6750e5fa2c53de0d9a56554d99cc8dcbb8
|
Python
|
apache-2.0
|
deepmind/dm_env
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0a0' # https://www.python.org/dev/peps/pep-0440/
Update semantic version to 1.0
PiperOrigin-RevId: 258839428
Change-Id: Idac6ba6750e5fa2c53de0d9a56554d99cc8dcbb8
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
<commit_before># pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0a0' # https://www.python.org/dev/peps/pep-0440/
<commit_msg>Update semantic version to 1.0
PiperOrigin-RevId: 258839428
Change-Id: Idac6ba6750e5fa2c53de0d9a56554d99cc8dcbb8<commit_after>
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0a0' # https://www.python.org/dev/peps/pep-0440/
Update semantic version to 1.0
PiperOrigin-RevId: 258839428
Change-Id: Idac6ba6750e5fa2c53de0d9a56554d99cc8dcbb8# pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
<commit_before># pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0a0' # https://www.python.org/dev/peps/pep-0440/
<commit_msg>Update semantic version to 1.0
PiperOrigin-RevId: 258839428
Change-Id: Idac6ba6750e5fa2c53de0d9a56554d99cc8dcbb8<commit_after># pylint: disable=g-bad-file-header
# Copyright 2019 The dm_env Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Package metadata for dm_env.
This is kept in a separate module so that it can be imported from setup.py, at
a time when dm_env's dependencies may not have been installed yet.
"""
__version__ = '1.0' # https://www.python.org/dev/peps/pep-0440/
|
56c0d2ea610aae35edfef2d242e0c4ca6a236a4d
|
crypto.py
|
crypto.py
|
from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
Replace file_io usage with open
|
Replace file_io usage with open
|
Python
|
unlicense
|
kvikshaug/pwkeeper
|
from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
Replace file_io usage with open
|
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
<commit_before>from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
<commit_msg>Replace file_io usage with open<commit_after>
|
from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
Replace file_io usage with openfrom Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
<commit_before>from Crypto.Cipher import AES
import os
from file_io import *
from settings import *
def get_cipher(iv, text):
try:
key = read_file(KEY_FILE, 'rt').strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
bytes = read_file(ENCRYPTED_FILE, 'rb')
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
<commit_msg>Replace file_io usage with open<commit_after>from Crypto.Cipher import AES
import os
from settings import *
def get_cipher(iv, text):
try:
with open(KEY_FILE, 'rt') as f:
key = f.read().strip()
except IOError:
key = input(text)
return AES.new(key, AES.MODE_CBC, iv)
def encrypt(bytes):
iv = os.urandom(16)
c = get_cipher(iv, "Please enter an encryption key: ")
return (iv, c.encrypt(bytes))
def decrypt():
with open(ENCRYPTED_FILE, 'rb') as f:
bytes = f.read()
c = get_cipher(bytes[:16], "Please enter the decryption key: ")
return c.decrypt(bytes[16:]).strip(b'\x04')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length)))
|
f925c2378b731add65b57ebd1a66392166d558b9
|
pyQuantuccia/setup.py
|
pyQuantuccia/setup.py
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
Correct the name of the file.
|
Correct the name of the file.
|
Python
|
bsd-3-clause
|
jwg4/pyQuantuccia,jwg4/pyQuantuccia
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
Correct the name of the file.
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
<commit_before>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
<commit_msg>Correct the name of the file.<commit_after>
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
Correct the name of the file.import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
<commit_before>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
<commit_msg>Correct the name of the file.<commit_after>import setuptools
qu_ext = setuptools.Extension(
'quantuccia',
sources=['src/pyQuantuccia.cpp']
)
setuptools.setup(
name='pyQuantuccia',
version='0.1.0',
packages=['pyQuantuccia'],
ext_modules=[qu_ext]
)
|
51bb7408f8a479a75200ec1b01a05cb65982d060
|
config.py
|
config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("DEV_DATABASE_URL")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("TEST_DATABASE_URL")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("MYDICTIONARY_SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_DEV_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "dev_database.sqlite")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_TEST_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "test_database.sqlite")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
Change environment variable names and add placeholders
|
Change environment variable names and add placeholders
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("DEV_DATABASE_URL")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("TEST_DATABASE_URL")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}Change environment variable names and add placeholders
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("MYDICTIONARY_SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_DEV_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "dev_database.sqlite")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_TEST_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "test_database.sqlite")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
<commit_before>import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("DEV_DATABASE_URL")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("TEST_DATABASE_URL")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}<commit_msg>Change environment variable names and add placeholders<commit_after>
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("MYDICTIONARY_SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_DEV_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "dev_database.sqlite")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_TEST_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "test_database.sqlite")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("DEV_DATABASE_URL")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("TEST_DATABASE_URL")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}Change environment variable names and add placeholdersimport os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("MYDICTIONARY_SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_DEV_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "dev_database.sqlite")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_TEST_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "test_database.sqlite")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
<commit_before>import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("DEV_DATABASE_URL")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("TEST_DATABASE_URL")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}<commit_msg>Change environment variable names and add placeholders<commit_after>import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
""" Base configuration with values used in all configurations. """
SERVER_NAME = "localhost:5000"
SECRET_KEY = os.getenv("MYDICTIONARY_SECRET_KEY")
SQLALCHEMY_RECORD_QUERIES = True
SQLALCHEMY_TRACK_MODIFICATIONS = True
class DevelopmentConfig(Config):
"""
Development configuration.
Activates the debugger and uses the database specified
in the DEV_DATABASE_URL environment variable.
"""
DEBUG = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_DEV_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "dev_database.sqlite")
class TestingConfig(Config):
"""
Testing configuration.
Sets the testing flag to True and uses the database
specified in the TEST_DATABASE_URL environment variable.
"""
TESTING = True
SQLALCHEMY_DATABASE_URI = os.getenv("MYDICTIONARY_TEST_DATABASE_URL") or \
"sqlite:///" + os.path.join(basedir, "test_database.sqlite")
config = {
"development": DevelopmentConfig,
"testing": TestingConfig,
"default": DevelopmentConfig
}
|
a19a5dfacd09ffebe8fdc2f5edcbf1aec6d73751
|
tests/django_settings.py
|
tests/django_settings.py
|
# Minimum settings that are needed to run django test suite
import os
import tempfile
SECRET_KEY = 'WE DONT CARE ABOUT IT'
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
# Minimum settings that are needed to run django test suite
import os
import secrets
import tempfile
SECRET_KEY = secrets.token_hex()
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
Use a different django secret key for each test run.
|
Use a different django secret key for each test run.
|
Python
|
bsd-3-clause
|
smn/django-dirtyfields,romgar/django-dirtyfields
|
# Minimum settings that are needed to run django test suite
import os
import tempfile
SECRET_KEY = 'WE DONT CARE ABOUT IT'
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
Use a different django secret key for each test run.
|
# Minimum settings that are needed to run django test suite
import os
import secrets
import tempfile
SECRET_KEY = secrets.token_hex()
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
<commit_before># Minimum settings that are needed to run django test suite
import os
import tempfile
SECRET_KEY = 'WE DONT CARE ABOUT IT'
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
<commit_msg>Use a different django secret key for each test run.<commit_after>
|
# Minimum settings that are needed to run django test suite
import os
import secrets
import tempfile
SECRET_KEY = secrets.token_hex()
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
# Minimum settings that are needed to run django test suite
import os
import tempfile
SECRET_KEY = 'WE DONT CARE ABOUT IT'
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
Use a different django secret key for each test run.# Minimum settings that are needed to run django test suite
import os
import secrets
import tempfile
SECRET_KEY = secrets.token_hex()
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
<commit_before># Minimum settings that are needed to run django test suite
import os
import tempfile
SECRET_KEY = 'WE DONT CARE ABOUT IT'
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
<commit_msg>Use a different django secret key for each test run.<commit_after># Minimum settings that are needed to run django test suite
import os
import secrets
import tempfile
SECRET_KEY = secrets.token_hex()
if "postgresql" in os.getenv("TOX_ENV_NAME", ""):
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
# Should be the same defined in .travis.yml
'NAME': 'dirtyfields_test',
# postgres user is by default created in travis-ci
'USER': os.getenv('POSTGRES_USER', 'postgres'),
# postgres user has no password on travis-ci
'PASSWORD': os.getenv('POSTGRES_PASSWORD', ''),
'HOST': 'localhost',
'PORT': '5432', # default postgresql port
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dirtyfields.db',
}
}
INSTALLED_APPS = ('tests', )
MEDIA_ROOT = tempfile.mkdtemp(prefix="django-dirtyfields-test-media-root-")
|
0f688ac6b05caada0f2b72e5e6bc484c1b45ac04
|
fmriprep/workflows/bold/__init__.py
|
fmriprep/workflows/bold/__init__.py
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.t2s
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
Update bold init for imports
|
Update bold init for imports
|
Python
|
bsd-3-clause
|
poldracklab/fmriprep,oesteban/preprocessing-workflow,poldracklab/preprocessing-workflow,oesteban/fmriprep,poldracklab/preprocessing-workflow,poldracklab/fmriprep,oesteban/fmriprep,oesteban/preprocessing-workflow,oesteban/fmriprep,poldracklab/fmriprep
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
Update bold init for imports
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.t2s
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
<commit_before># -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
<commit_msg>Update bold init for imports<commit_after>
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.t2s
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
Update bold init for imports# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.t2s
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
<commit_before># -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
<commit_msg>Update bold init for imports<commit_after># -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
# pylint: disable=unused-import
"""
Pre-processing fMRI - BOLD signal workflows
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: fmriprep.workflows.bold.base
.. automodule:: fmriprep.workflows.bold.util
.. automodule:: fmriprep.workflows.bold.hmc
.. automodule:: fmriprep.workflows.bold.stc
.. automodule:: fmriprep.workflows.bold.t2s
.. automodule:: fmriprep.workflows.bold.registration
.. automodule:: fmriprep.workflows.bold.resampling
.. automodule:: fmriprep.workflows.bold.confounds
"""
from .base import init_func_preproc_wf
from .util import init_bold_reference_wf
from .hmc import init_bold_hmc_wf
from .stc import init_bold_stc_wf
from .t2s import init_bold_t2s_wf
from .registration import init_bold_reg_wf
from .resampling import (
init_bold_mni_trans_wf,
init_bold_surf_wf,
init_bold_preproc_trans_wf,
)
from .confounds import (
init_bold_confs_wf,
init_ica_aroma_wf,
)
|
7f93f3a8b8fb703588b7f1b5fee9856d0a597636
|
tests/test_serialize.py
|
tests/test_serialize.py
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
@given(binary(min_size=rtphdr.size + rtpevent.size,
max_size=rtphdr.size + rtpevent.size))
def test_rtp_and_rtpevent_decode_inverts_encode(pkt):
rtp = parse_rtp(pkt)
rtpevent = parse_rtpevent(rtp.pop('payload'))
rtp['payload'] = pack_rtpevent(rtpevent)
assert pack_rtp(rtp) == pkt
|
Add test make sure rtpevent inside rtp parses
|
Add test make sure rtpevent inside rtp parses
|
Python
|
apache-2.0
|
vodik/aiortp
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
Add test make sure rtpevent inside rtp parses
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
@given(binary(min_size=rtphdr.size + rtpevent.size,
max_size=rtphdr.size + rtpevent.size))
def test_rtp_and_rtpevent_decode_inverts_encode(pkt):
rtp = parse_rtp(pkt)
rtpevent = parse_rtpevent(rtp.pop('payload'))
rtp['payload'] = pack_rtpevent(rtpevent)
assert pack_rtp(rtp) == pkt
|
<commit_before>from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
<commit_msg>Add test make sure rtpevent inside rtp parses<commit_after>
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
@given(binary(min_size=rtphdr.size + rtpevent.size,
max_size=rtphdr.size + rtpevent.size))
def test_rtp_and_rtpevent_decode_inverts_encode(pkt):
rtp = parse_rtp(pkt)
rtpevent = parse_rtpevent(rtp.pop('payload'))
rtp['payload'] = pack_rtpevent(rtpevent)
assert pack_rtp(rtp) == pkt
|
from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
Add test make sure rtpevent inside rtp parsesfrom hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
@given(binary(min_size=rtphdr.size + rtpevent.size,
max_size=rtphdr.size + rtpevent.size))
def test_rtp_and_rtpevent_decode_inverts_encode(pkt):
rtp = parse_rtp(pkt)
rtpevent = parse_rtpevent(rtp.pop('payload'))
rtp['payload'] = pack_rtpevent(rtpevent)
assert pack_rtp(rtp) == pkt
|
<commit_before>from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
<commit_msg>Add test make sure rtpevent inside rtp parses<commit_after>from hypothesis import given
from hypothesis.strategies import binary
from aiortp.packet import rtphdr, pack_rtp, parse_rtp
from aiortp.packet import rtpevent, pack_rtpevent, parse_rtpevent
@given(binary(min_size=rtphdr.size, max_size=rtphdr.size + 1000))
def test_rtp_decode_inverts_encode(pkt):
assert pack_rtp(parse_rtp(pkt)) == pkt
@given(binary(min_size=rtpevent.size, max_size=rtpevent.size))
def test_rtpevent_decode_inverts_encode(pkt):
assert pack_rtpevent(parse_rtpevent(pkt)) == pkt
@given(binary(min_size=rtphdr.size + rtpevent.size,
max_size=rtphdr.size + rtpevent.size))
def test_rtp_and_rtpevent_decode_inverts_encode(pkt):
rtp = parse_rtp(pkt)
rtpevent = parse_rtpevent(rtp.pop('payload'))
rtp['payload'] = pack_rtpevent(rtpevent)
assert pack_rtp(rtp) == pkt
|
c371d3663fc1de7d99246d97ec054c7da865e4cf
|
testshop/test_models.py
|
testshop/test_models.py
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress, BillingAddress # noqa
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
    """Exercise creation of shipping and billing addresses for one customer."""

    def setUp(self):
        super(AddressTest, self).setUp()
        User = get_user_model()
        user = {
            'username': 'john',
            'first_name': 'John',
            'last_name': 'Doe',
            'email': 'john@example.com',
            'password': 'secret',
        }
        user = User.objects.create(**user)
        self.customer = Customer.objects.create(user=user)
        # Sanity check: the customer row really was persisted.
        self.assertGreaterEqual(self.customer.pk, 1)

    def test_shipping_address(self):
        shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer)
        self.assertGreaterEqual(shipping_addr.id, 1)
        billing_addr = BillingAddress.objects.create(priority=1, customer=self.customer)
        # BUG FIX: the original re-asserted shipping_addr.id here (copy-paste),
        # leaving the billing address entirely unchecked.
        self.assertGreaterEqual(billing_addr.id, 1)
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
address = {'addressee': "John Doe", 'street': "31, Orwell Rd", 'zip_code': "L41RG",
'location': "Liverpool", 'country': 'UK'}
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer, **address)
self.assertGreaterEqual(shipping_addr.id, 1)
addr_block = "John Doe\n31, Orwell Rd\nL41RG Liverpool\nUK"
self.assertMultiLineEqual(shipping_addr.as_text(), addr_block)
self.assertEqual(ShippingAddress.objects.get_max_priority(self.customer), 1)
self.assertEqual(ShippingAddress.objects.get_fallback(self.customer), shipping_addr)
|
Address model testing coverage: 100%
|
Address model testing coverage: 100%
|
Python
|
bsd-3-clause
|
jrief/django-shop,khchine5/django-shop,khchine5/django-shop,rfleschenberg/django-shop,rfleschenberg/django-shop,divio/django-shop,khchine5/django-shop,awesto/django-shop,awesto/django-shop,jrief/django-shop,rfleschenberg/django-shop,nimbis/django-shop,nimbis/django-shop,rfleschenberg/django-shop,khchine5/django-shop,awesto/django-shop,jrief/django-shop,nimbis/django-shop,divio/django-shop,jrief/django-shop,divio/django-shop,nimbis/django-shop
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress, BillingAddress # noqa
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
billing_addr = BillingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
Address model testing coverage: 100%
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
address = {'addressee': "John Doe", 'street': "31, Orwell Rd", 'zip_code': "L41RG",
'location': "Liverpool", 'country': 'UK'}
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer, **address)
self.assertGreaterEqual(shipping_addr.id, 1)
addr_block = "John Doe\n31, Orwell Rd\nL41RG Liverpool\nUK"
self.assertMultiLineEqual(shipping_addr.as_text(), addr_block)
self.assertEqual(ShippingAddress.objects.get_max_priority(self.customer), 1)
self.assertEqual(ShippingAddress.objects.get_fallback(self.customer), shipping_addr)
|
<commit_before># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress, BillingAddress # noqa
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
billing_addr = BillingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
<commit_msg>Address model testing coverage: 100%<commit_after>
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
address = {'addressee': "John Doe", 'street': "31, Orwell Rd", 'zip_code': "L41RG",
'location': "Liverpool", 'country': 'UK'}
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer, **address)
self.assertGreaterEqual(shipping_addr.id, 1)
addr_block = "John Doe\n31, Orwell Rd\nL41RG Liverpool\nUK"
self.assertMultiLineEqual(shipping_addr.as_text(), addr_block)
self.assertEqual(ShippingAddress.objects.get_max_priority(self.customer), 1)
self.assertEqual(ShippingAddress.objects.get_fallback(self.customer), shipping_addr)
|
# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress, BillingAddress # noqa
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
billing_addr = BillingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
Address model testing coverage: 100%# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
address = {'addressee': "John Doe", 'street': "31, Orwell Rd", 'zip_code': "L41RG",
'location': "Liverpool", 'country': 'UK'}
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer, **address)
self.assertGreaterEqual(shipping_addr.id, 1)
addr_block = "John Doe\n31, Orwell Rd\nL41RG Liverpool\nUK"
self.assertMultiLineEqual(shipping_addr.as_text(), addr_block)
self.assertEqual(ShippingAddress.objects.get_max_priority(self.customer), 1)
self.assertEqual(ShippingAddress.objects.get_fallback(self.customer), shipping_addr)
|
<commit_before># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress, BillingAddress # noqa
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
billing_addr = BillingAddress.objects.create(priority=1, customer=self.customer)
self.assertGreaterEqual(shipping_addr.id, 1)
<commit_msg>Address model testing coverage: 100%<commit_after># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from django.contrib.auth import get_user_model
from shop.models.defaults.address import ShippingAddress
from shop.models.defaults.customer import Customer
class AddressTest(TestCase):
def setUp(self):
super(AddressTest, self).setUp()
User = get_user_model()
user = {
'username': 'john',
'first_name': 'John',
'last_name': 'Doe',
'email': 'john@example.com',
'password': 'secret',
}
user = User.objects.create(**user)
self.customer = Customer.objects.create(user=user)
self.assertGreaterEqual(self.customer.pk, 1)
def test_shipping_address(self):
address = {'addressee': "John Doe", 'street': "31, Orwell Rd", 'zip_code': "L41RG",
'location': "Liverpool", 'country': 'UK'}
shipping_addr = ShippingAddress.objects.create(priority=1, customer=self.customer, **address)
self.assertGreaterEqual(shipping_addr.id, 1)
addr_block = "John Doe\n31, Orwell Rd\nL41RG Liverpool\nUK"
self.assertMultiLineEqual(shipping_addr.as_text(), addr_block)
self.assertEqual(ShippingAddress.objects.get_max_priority(self.customer), 1)
self.assertEqual(ShippingAddress.objects.get_fallback(self.customer), shipping_addr)
|
32a093a95bb1b94fba3ea36dc10b6e81086d9a5b
|
dbaas/dbaas_services/analyzing/tasks/analyze.py
|
dbaas/dbaas_services/analyzing/tasks/analyze.py
|
# -*- coding: utf-8 -*-
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
@app.task
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
    """Return (name, engine type, short instance host names) for *database*."""
    driver = database.databaseinfra.get_driver()
    hostnames = []
    for db_instance in driver.get_database_instances():
        # Keep only the short host name: everything before the first dot
        # of the instance's DNS name.
        hostnames.append(db_instance.dns.split('.')[0])
    return database.name, database.engine_type, hostnames
|
# -*- coding: utf-8 -*-
import logging
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
                      **kwargs):
    # Run the external analyze service against every non-quarantined database.
    # Guarded by @only_one so at most one worker executes this at a time.
    user = User.objects.get(username='admin')
    # Attribute all DB changes made during this run to the admin user for auditing.
    AuditRequest.new_request("analyze_databases", user, "localhost")
    try:
        try:
            analyze_service = AnalyzeService(endpoint, healh_check_route,
                                             healh_check_string)
        except ServiceNotAvailable as e:
            # Service health check failed: log and skip this run entirely.
            LOG.warn(e)
            return
        databases = Database.objects.filter(is_in_quarantine=False)
        for database in databases:
            database_name, engine, instances = setup_database_info(database)
            result = analyze_service.run(engine=engine, database_name=database_name,
                                         instances=instances, **kwargs)
            print result
    except Exception:
        # NOTE(review): broad swallow — presumably a deliberate best-effort
        # policy so one bad database cannot crash the worker; confirm.
        pass
    finally:
        # Always detach the audit context, even on failure.
        AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
|
Check if service is working
|
Check if service is working
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
@app.task
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
Check if service is working
|
# -*- coding: utf-8 -*-
import logging
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
try:
analyze_service = AnalyzeService(endpoint, healh_check_route,
healh_check_string)
except ServiceNotAvailable as e:
LOG.warn(e)
return
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
|
<commit_before># -*- coding: utf-8 -*-
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
@app.task
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
<commit_msg>Check if service is working<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
try:
analyze_service = AnalyzeService(endpoint, healh_check_route,
healh_check_string)
except ServiceNotAvailable as e:
LOG.warn(e)
return
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
|
# -*- coding: utf-8 -*-
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
@app.task
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
Check if service is working# -*- coding: utf-8 -*-
import logging
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
try:
analyze_service = AnalyzeService(endpoint, healh_check_route,
healh_check_string)
except ServiceNotAvailable as e:
LOG.warn(e)
return
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
|
<commit_before># -*- coding: utf-8 -*-
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
@app.task
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
<commit_msg>Check if service is working<commit_after># -*- coding: utf-8 -*-
import logging
from dbaas.celery import app
from account.models import User
from logical.models import Database
from util.decorators import only_one
from simple_audit.models import AuditRequest
from dbaas_services.analyzing.integration import AnalyzeService
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="analyze_databases_service_task", timeout=6000)
def analyze_databases(self, endpoint, healh_check_route, healh_check_string,
**kwargs):
user = User.objects.get(username='admin')
AuditRequest.new_request("analyze_databases", user, "localhost")
try:
try:
analyze_service = AnalyzeService(endpoint, healh_check_route,
healh_check_string)
except ServiceNotAvailable as e:
LOG.warn(e)
return
databases = Database.objects.filter(is_in_quarantine=False)
for database in databases:
database_name, engine, instances = setup_database_info(database)
result = analyze_service.run(engine=engine, database_name=database_name,
instances=instances, **kwargs)
print result
except Exception:
pass
finally:
AuditRequest.cleanup_request()
def setup_database_info(database):
databaseinfra = database.databaseinfra
driver = databaseinfra.get_driver()
database_instances = driver.get_database_instances()
instances = [db_instance.dns.split('.')[0] for db_instance in database_instances]
return database.name, database.engine_type, instances
|
8fc6ba648347a48065ab2fb26f940dc92919feeb
|
bands/__init__.py
|
bands/__init__.py
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
Implement new python-based menu format
|
Implement new python-based menu format
|
Python
|
agpl-3.0
|
magfest/bands,magfest/bands
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
Implement new python-based menu format
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
# Let this plugin's static assets and templates shadow the core ones.
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
# Auto-mount every site-section module shipped under this plugin's root.
mount_site_sections(bands_config['module_root'])
# Register the plugin in the People admin menu (new Python-based menu
# format); the entry is shown only to accounts with BANDS access.
c.MENU['People'].append_menu_item(
    MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
<commit_before>import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
<commit_msg>Implement new python-based menu format<commit_after>
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
Implement new python-based menu formatimport shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
<commit_before>import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
<commit_msg>Implement new python-based menu format<commit_after>import shutil
from cherrypy.lib.static import serve_file
from uber.common import *
from panels import *
from bands._version import __version__
from bands.config import *
from bands.models import *
import bands.model_checks
import bands.automated_emails
static_overrides(join(bands_config['module_root'], 'static'))
template_overrides(join(bands_config['module_root'], 'templates'))
mount_site_sections(bands_config['module_root'])
c.MENU['People'].append_menu_item(
MenuItem(access=c.BANDS, name='Bands', href='../band_admin/')
)
|
ba649e4bce746f19712f127ac15e77345a5ec837
|
parkings/api/public/parking_area_statistics.py
|
parkings/api/public/parking_area_statistics.py
|
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
    """Serialize a ParkingArea as its id plus a privacy-blurred live parking count."""

    current_parking_count = serializers.SerializerMethodField()

    def get_current_parking_count(self, area):
        # FIX: read the clock once. Calling timezone.now() separately for the
        # time_end and time_start bounds compares against two slightly
        # different instants, so a parking starting/ending between the two
        # reads could be miscounted.
        now = timezone.now()
        count = Parking.objects.filter(
            parking_area=area,
            time_end__gte=now,
            time_start__lte=now,
        ).count()
        return self.blur_count(count)

    def blur_count(self, count):
        """
        Returns a blurred count, which is supposed to hide individual
        parkings.
        """
        if count <= 3:
            return 0
        else:
            return count

    class Meta:
        model = ParkingArea
        fields = (
            'id',
            'current_parking_count',
        )
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
    # Read-only public endpoint exposing the blurred parking count per area.
    queryset = ParkingArea.objects.all()
    serializer_class = ParkingAreaStatisticsSerializer
    # Clients filter areas by a WGS84 bounding box.
    # NOTE(review): bbox_filter_field is 'areas' — confirm this matches the
    # geometry field name on ParkingArea; cannot verify from this file.
    bbox_filter_field = 'areas'
    filter_backends = (WGS84InBBoxFilter,)
    # Include areas that merely overlap the bbox, not only fully contained ones.
    bbox_filter_include_overlapping = True
|
from django.db.models import Case, Count, When
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
return self.blur_count(area['current_parking_count'])
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
def get_queryset(self):
now = timezone.now()
return ParkingArea.objects.annotate(
current_parking_count=Count(
Case(
When(
parking__time_start__lte=now,
parking__time_end__gte=now,
then=1,
)
)
)
).values('id', 'current_parking_count')
|
Improve parking area statistics performance
|
Improve parking area statistics performance
|
Python
|
mit
|
tuomas777/parkkihubi
|
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
Improve parking area statistics performance
|
from django.db.models import Case, Count, When
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
return self.blur_count(area['current_parking_count'])
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
def get_queryset(self):
now = timezone.now()
return ParkingArea.objects.annotate(
current_parking_count=Count(
Case(
When(
parking__time_start__lte=now,
parking__time_end__gte=now,
then=1,
)
)
)
).values('id', 'current_parking_count')
|
<commit_before>from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
<commit_msg>Improve parking area statistics performance<commit_after>
|
from django.db.models import Case, Count, When
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
return self.blur_count(area['current_parking_count'])
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
def get_queryset(self):
now = timezone.now()
return ParkingArea.objects.annotate(
current_parking_count=Count(
Case(
When(
parking__time_start__lte=now,
parking__time_end__gte=now,
then=1,
)
)
)
).values('id', 'current_parking_count')
|
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
Improve parking area statistics performancefrom django.db.models import Case, Count, When
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
return self.blur_count(area['current_parking_count'])
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
def get_queryset(self):
now = timezone.now()
return ParkingArea.objects.annotate(
current_parking_count=Count(
Case(
When(
parking__time_start__lte=now,
parking__time_end__gte=now,
then=1,
)
)
)
).values('id', 'current_parking_count')
|
<commit_before>from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import Parking, ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
count = Parking.objects.filter(
parking_area=area,
time_end__gte=timezone.now(),
time_start__lte=timezone.now(),
).count()
return self.blur_count(count)
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
<commit_msg>Improve parking area statistics performance<commit_after>from django.db.models import Case, Count, When
from django.utils import timezone
from rest_framework import serializers, viewsets
from parkings.models import ParkingArea
from ..common import WGS84InBBoxFilter
class ParkingAreaStatisticsSerializer(serializers.ModelSerializer):
current_parking_count = serializers.SerializerMethodField()
def get_current_parking_count(self, area):
return self.blur_count(area['current_parking_count'])
def blur_count(self, count):
"""
Returns a blurred count, which is supposed to hide individual
parkings.
"""
if count <= 3:
return 0
else:
return count
class Meta:
model = ParkingArea
fields = (
'id',
'current_parking_count',
)
class PublicAPIParkingAreaStatisticsViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ParkingArea.objects.all()
serializer_class = ParkingAreaStatisticsSerializer
bbox_filter_field = 'areas'
filter_backends = (WGS84InBBoxFilter,)
bbox_filter_include_overlapping = True
def get_queryset(self):
now = timezone.now()
return ParkingArea.objects.annotate(
current_parking_count=Count(
Case(
When(
parking__time_start__lte=now,
parking__time_end__gte=now,
then=1,
)
)
)
).values('id', 'current_parking_count')
|
23df1ed7a02f3c120a0d5075b27cc92f3e1b6429
|
src/chime_dash/app/components/navbar.py
|
src/chime_dash/app/components/navbar.py
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
)
)
return [nav]
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
children=dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
),
dark=True,
fixed="top",
color="dark"
)
return [nav]
|
Fix nav at top of window. Use dark theme to distinguish from content.
|
Fix nav at top of window. Use dark theme to distinguish from content.
|
Python
|
mit
|
CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
)
)
return [nav]
Fix nav at top of window. Use dark theme to distinguish from content.
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
children=dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
),
dark=True,
fixed="top",
color="dark"
)
return [nav]
|
<commit_before>"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
)
)
return [nav]
<commit_msg>Fix nav at top of window. Use dark theme to distinguish from content.<commit_after>
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
children=dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
),
dark=True,
fixed="top",
color="dark"
)
return [nav]
|
"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
)
)
return [nav]
Fix nav at top of window. Use dark theme to distinguish from content."""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
children=dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
),
dark=True,
fixed="top",
color="dark"
)
return [nav]
|
<commit_before>"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
)
)
return [nav]
<commit_msg>Fix nav at top of window. Use dark theme to distinguish from content.<commit_after>"""Navigation bar view
"""
from typing import List
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.development.base_component import ComponentMeta
from penn_chime.defaults import Constants
from penn_chime.settings import DEFAULTS
from chime_dash.app.components.base import Component
from chime_dash.app.components.menu import Menu
class Navbar(Component):
"""
"""
def __init__(self, language: str = "en", defaults: Constants = DEFAULTS):
"""Sets up self, menue and header
"""
super().__init__(language, defaults=defaults)
self.menu = Menu(language, defaults=defaults)
def get_html(self) -> List[ComponentMeta]:
"""Initialize the navigation bar
"""
nav = dbc.Navbar(
children=dbc.Container(
[
html.A(
dbc.Row(
children=[
dbc.Col(
dbc.NavbarBrand(
children="Penn Medicine CHIME", href="/"
)
),
],
align="center",
no_gutters=True,
),
href="https://www.pennmedicine.org/",
),
]
+ self.menu.html
),
dark=True,
fixed="top",
color="dark"
)
return [nav]
|
7ec28d5b8be40b505a20a4670857278ad41f760b
|
src/puzzle/puzzlepedia/puzzlepedia.py
|
src/puzzle/puzzlepedia/puzzlepedia.py
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
Allow "threshold" to be specified during parse(...).
|
Allow "threshold" to be specified during parse(...).
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
Allow "threshold" to be specified during parse(...).
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
<commit_before>from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
<commit_msg>Allow "threshold" to be specified during parse(...).<commit_after>
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
Allow "threshold" to be specified during parse(...).from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
<commit_before>from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
<commit_msg>Allow "threshold" to be specified during parse(...).<commit_after>from IPython import display
from puzzle.puzzlepedia import prod_config, puzzle, puzzle_widget
_INITIALIZED = False
def parse(source, hint=None, threshold=None):
_init()
result = puzzle.Puzzle('first stage', source, hint=hint, threshold=threshold)
interact_with(result)
return result
def interact_with(puzzle):
_init()
display.display(puzzle_widget.PuzzleWidget(puzzle))
def _init():
global _INITIALIZED
if not _INITIALIZED:
_INITIALIZED = True
prod_config.init()
def reset():
global _INITIALIZED
_INITIALIZED = False
prod_config.reset()
|
2c1cbdfcb28595e945e6069501652f0869733549
|
bids/reports/report.py
|
bids/reports/report.py
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from __future__ import print_function
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
Add future import for Py2.
|
Add future import for Py2.
|
Python
|
mit
|
INCF/pybids
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
Add future import for Py2.
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from __future__ import print_function
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
<commit_before>"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
<commit_msg>Add future import for Py2.<commit_after>
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from __future__ import print_function
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
Add future import for Py2."""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from __future__ import print_function
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
<commit_before>"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
<commit_msg>Add future import for Py2.<commit_after>"""Generate publication-quality data acquisition methods section from BIDS dataset.
"""
from __future__ import print_function
from collections import Counter
from bids.reports import utils
class BIDSReport(object):
"""
Generates publication-quality data acquisition methods section from BIDS
dataset.
"""
def __init__(self, layout):
self.layout = layout
def generate(self, task_converter=None):
"""Generate the methods section.
"""
if task_converter is None:
task_converter = {}
descriptions = []
sessions = self.layout.get_sessions()
if sessions:
for ses in sessions:
subjs = self.layout.get_subjects(session=ses)
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=ses,
task_converter=task_converter)
descriptions.append(description)
else:
subjs = self.layout.get_subjects()
for sid in subjs:
description = utils.report(self.layout, subj=sid, ses=None,
task_converter=task_converter)
descriptions.append(description)
counter = Counter(descriptions)
print('Number of patterns detected: {0}'.format(len(counter.keys())))
return counter
|
bdacb243867e1d6cef3573fec383e9069e11523e
|
eche/tests/test_step1_raed_print.py
|
eche/tests/test_step1_raed_print.py
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
Add list with list test.
|
Add list with list test.
|
Python
|
mit
|
skk/eche
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
Add list with list test.
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
<commit_before>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
<commit_msg>Add list with list test.<commit_after>
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
Add list with list test.import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
<commit_before>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
<commit_msg>Add list with list test.<commit_after>import pytest
from eche.reader import read_str
from eche.printer import print_str
import math
@pytest.mark.parametrize("test_input", [
'1',
'-1',
'0',
str(math.pi),
str(math.e)
])
def test_numbers(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'*',
'+',
'abc',
'test1',
'abc-def',
])
def test_eche_type_symbol(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'((9 8))',
'()',
'(* 1 2)',
'(+ (* 1 5) (/ 1 0))'
])
def test_eche_type_list(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'nil',
])
def test_nil(test_input):
assert print_str(read_str(test_input)) == test_input
@pytest.mark.parametrize("test_input", [
'true',
'false',
])
def test_bool(test_input):
assert print_str(read_str(test_input)) == test_input
|
420a786fb9c5bf476e2c444b181161e75ca801f8
|
glitter_news/admin.py
|
glitter_news/admin.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_sticky', 'is_published')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': (
'title', 'category', 'is_sticky', 'author', 'date', 'image', 'summary', 'tags',
)
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_published', 'is_sticky')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'is_sticky', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': ('title', 'category', 'author', 'date', 'image', 'summary', 'tags')
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
Move is_sticky field into post's advanced options
|
Move is_sticky field into post's advanced options
For #13
|
Python
|
bsd-2-clause
|
blancltd/glitter-news
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_sticky', 'is_published')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': (
'title', 'category', 'is_sticky', 'author', 'date', 'image', 'summary', 'tags',
)
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
Move is_sticky field into post's advanced options
For #13
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_published', 'is_sticky')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'is_sticky', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': ('title', 'category', 'author', 'date', 'image', 'summary', 'tags')
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_sticky', 'is_published')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': (
'title', 'category', 'is_sticky', 'author', 'date', 'image', 'summary', 'tags',
)
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
<commit_msg>Move is_sticky field into post's advanced options
For #13<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_published', 'is_sticky')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'is_sticky', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': ('title', 'category', 'author', 'date', 'image', 'summary', 'tags')
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_sticky', 'is_published')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': (
'title', 'category', 'is_sticky', 'author', 'date', 'image', 'summary', 'tags',
)
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
Move is_sticky field into post's advanced options
For #13# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_published', 'is_sticky')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'is_sticky', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': ('title', 'category', 'author', 'date', 'image', 'summary', 'tags')
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_sticky', 'is_published')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': (
'title', 'category', 'is_sticky', 'author', 'date', 'image', 'summary', 'tags',
)
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
<commit_msg>Move is_sticky field into post's advanced options
For #13<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.contrib import admin
from adminsortable.admin import SortableAdmin
from glitter import block_admin
from glitter.admin import GlitterAdminMixin, GlitterPagePublishedFilter
from .models import Category, LatestNewsBlock, Post
@admin.register(Category)
class CategoryAdmin(SortableAdmin):
prepopulated_fields = {
'slug': ('title',)
}
@admin.register(Post)
class PostAdmin(GlitterAdminMixin, admin.ModelAdmin):
date_hierarchy = 'date'
list_display = ('title', 'date', 'category', 'is_published', 'is_sticky')
list_filter = (GlitterPagePublishedFilter, 'date', 'category')
prepopulated_fields = {
'slug': ('title',)
}
def get_fieldsets(self, request, obj=None):
advanced_options = ['published', 'is_sticky', 'slug']
if getattr(settings, 'GLITTER_NEWS_TAGS', False):
advanced_options.append('tags')
fieldsets = (
('Post', {
'fields': ('title', 'category', 'author', 'date', 'image', 'summary', 'tags')
}),
('Advanced options', {
'fields': advanced_options
}),
)
return fieldsets
block_admin.site.register(LatestNewsBlock)
block_admin.site.register_block(LatestNewsBlock, 'App Blocks')
|
221134b178bc4106bc39c7eb3120395c27473963
|
json2csv_business.py
|
json2csv_business.py
|
import json
def main():
# print the header of output csv file
print 'business_id,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
import json
def main():
# print the header of output csv file
print 'business_id,name,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
name = input_json['name'].encode('ascii', 'ignore').replace(',', '')
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + name + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
Add business name to csv output
|
Add business name to csv output
|
Python
|
mit
|
aysent/yelp-photo-explorer
|
import json
def main():
# print the header of output csv file
print 'business_id,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
Add business name to csv output
|
import json
def main():
# print the header of output csv file
print 'business_id,name,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
name = input_json['name'].encode('ascii', 'ignore').replace(',', '')
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + name + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
<commit_before>import json
def main():
# print the header of output csv file
print 'business_id,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
<commit_msg>Add business name to csv output<commit_after>
|
import json
def main():
# print the header of output csv file
print 'business_id,name,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
name = input_json['name'].encode('ascii', 'ignore').replace(',', '')
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + name + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
import json
def main():
# print the header of output csv file
print 'business_id,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
Add business name to csv outputimport json
def main():
# print the header of output csv file
print 'business_id,name,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
name = input_json['name'].encode('ascii', 'ignore').replace(',', '')
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + name + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
<commit_before>import json
def main():
# print the header of output csv file
print 'business_id,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
<commit_msg>Add business name to csv output<commit_after>import json
def main():
# print the header of output csv file
print 'business_id,name,city,latitude,longitude'
# for each entry in input json file print one csv row
for line in open("data/yelp_academic_dataset_business.json"):
input_json = json.loads(line)
business_id = input_json['business_id']
name = input_json['name'].encode('ascii', 'ignore').replace(',', '')
city = input_json['city'].encode('ascii', 'ignore').replace(',', '')
latitude = str(input_json['latitude'])
longitude = str(input_json['longitude'])
print business_id + ',' + name + ',' + city + ',' + latitude + ',' + longitude
if __name__ == "__main__":
main()
|
49d4d3fd5d040efb99bc487b9648e354a39df20d
|
test/testlib/schema.py
|
test/testlib/schema.py
|
import testbase
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
import testbase
from testlib import testing
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
# Apply some default cascading rules for self-referential foreign keys.
# MySQL InnoDB has some issues around seleting self-refs too.
if testing.against('firebird'):
table_name = args[0]
unpack = (testing.config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.args if isinstance(fk, schema.ForeignKey)]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
name = unpack(ref)[-2]
print name, table_name
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
Apply default cascade rules for firebird self-ref ForeignKeys.
|
Apply default cascade rules for firebird self-ref ForeignKeys.
git-svn-id: 655ff90ec95d1eeadb1ee4bb9db742a3c015d499@3959 8cd8332f-0806-0410-a4b6-96f4b9520244
|
Python
|
mit
|
obeattie/sqlalchemy,obeattie/sqlalchemy,obeattie/sqlalchemy
|
import testbase
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
Apply default cascade rules for firebird self-ref ForeignKeys.
git-svn-id: 655ff90ec95d1eeadb1ee4bb9db742a3c015d499@3959 8cd8332f-0806-0410-a4b6-96f4b9520244
|
import testbase
from testlib import testing
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
# Apply some default cascading rules for self-referential foreign keys.
# MySQL InnoDB has some issues around seleting self-refs too.
if testing.against('firebird'):
table_name = args[0]
unpack = (testing.config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.args if isinstance(fk, schema.ForeignKey)]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
name = unpack(ref)[-2]
print name, table_name
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
<commit_before>import testbase
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
<commit_msg>Apply default cascade rules for firebird self-ref ForeignKeys.
git-svn-id: 655ff90ec95d1eeadb1ee4bb9db742a3c015d499@3959 8cd8332f-0806-0410-a4b6-96f4b9520244<commit_after>
|
import testbase
from testlib import testing
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
# Apply some default cascading rules for self-referential foreign keys.
# MySQL InnoDB has some issues around seleting self-refs too.
if testing.against('firebird'):
table_name = args[0]
unpack = (testing.config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.args if isinstance(fk, schema.ForeignKey)]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
name = unpack(ref)[-2]
print name, table_name
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
import testbase
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
Apply default cascade rules for firebird self-ref ForeignKeys.
git-svn-id: 655ff90ec95d1eeadb1ee4bb9db742a3c015d499@3959 8cd8332f-0806-0410-a4b6-96f4b9520244import testbase
from testlib import testing
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
# Apply some default cascading rules for self-referential foreign keys.
# MySQL InnoDB has some issues around seleting self-refs too.
if testing.against('firebird'):
table_name = args[0]
unpack = (testing.config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.args if isinstance(fk, schema.ForeignKey)]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
name = unpack(ref)[-2]
print name, table_name
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
<commit_before>import testbase
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
<commit_msg>Apply default cascade rules for firebird self-ref ForeignKeys.
git-svn-id: 655ff90ec95d1eeadb1ee4bb9db742a3c015d499@3959 8cd8332f-0806-0410-a4b6-96f4b9520244<commit_after>import testbase
from testlib import testing
schema = None
__all__ = 'Table', 'Column',
table_options = {}
def Table(*args, **kw):
"""A schema.Table wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
test_opts = dict([(k,kw.pop(k)) for k in kw.keys()
if k.startswith('test_')])
kw.update(table_options)
if testbase.db.name == 'mysql':
if 'mysql_engine' not in kw and 'mysql_type' not in kw:
if 'test_needs_fk' in test_opts or 'test_needs_acid' in test_opts:
kw['mysql_engine'] = 'InnoDB'
# Apply some default cascading rules for self-referential foreign keys.
# MySQL InnoDB has some issues around seleting self-refs too.
if testing.against('firebird'):
table_name = args[0]
unpack = (testing.config.db.dialect.
identifier_preparer.unformat_identifiers)
# Only going after ForeignKeys in Columns. May need to
# expand to ForeignKeyConstraint too.
fks = [fk
for col in args if isinstance(col, schema.Column)
for fk in col.args if isinstance(fk, schema.ForeignKey)]
for fk in fks:
# root around in raw spec
ref = fk._colspec
if isinstance(ref, schema.Column):
name = ref.table.name
else:
name = unpack(ref)[-2]
print name, table_name
if name == table_name:
if fk.ondelete is None:
fk.ondelete = 'CASCADE'
if fk.onupdate is None:
fk.onupdate = 'CASCADE'
return schema.Table(*args, **kw)
def Column(*args, **kw):
"""A schema.Column wrapper/hook for dialect-specific tweaks."""
global schema
if schema is None:
from sqlalchemy import schema
# TODO: a Column that creates a Sequence automatically for PK columns,
# which would help Oracle tests
return schema.Column(*args, **kw)
|
af3a1ed1b1114ab85b16a687e0ba29e787c0722b
|
bookmarks/bookmarks.py
|
bookmarks/bookmarks.py
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
@app.route('/', methods=['GET'])
def front_page():
return 'Hello, World!'
|
Add route '/' with Hello World
|
Add route '/' with Hello World
|
Python
|
apache-2.0
|
byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
Add route '/' with Hello World
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
@app.route('/', methods=['GET'])
def front_page():
return 'Hello, World!'
|
<commit_before>from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
<commit_msg>Add route '/' with Hello World<commit_after>
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
@app.route('/', methods=['GET'])
def front_page():
return 'Hello, World!'
|
from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
Add route '/' with Hello Worldfrom flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
@app.route('/', methods=['GET'])
def front_page():
return 'Hello, World!'
|
<commit_before>from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
<commit_msg>Add route '/' with Hello World<commit_after>from flask import Flask
app = Flask(__name__)
app.config.from_object(__name__)
app.config.update(dict(
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
@app.route('/', methods=['GET'])
def front_page():
return 'Hello, World!'
|
a74a0c0f2066008586114fd5b0908f67c11c0334
|
sipa/blueprints/documents.py
|
sipa/blueprints/documents.py
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort
from os.path import isfile, realpath, join
bp_documents = Blueprint('documents', __name__)
@bp_documents.route('/images/<image>')
def show_image(image):
print("Trying to show image {}".format(image))
filename = realpath("content/images/{}".format(image))
print("filename: {}".format(filename))
if not isfile(filename):
print("aborting")
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
@bp_documents.route('/documents/<path:document>')
def show_pdf(document):
filename = join(realpath("content/documents/"), document)
if not isfile(filename):
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort, send_from_directory, current_app
from os.path import isfile, realpath, join
from flask.views import View
import os
bp_documents = Blueprint('documents', __name__)
class StaticFiles(View):
def __init__(self, directory):
self.directory = directory
def dispatch_request(self, filename):
if os.path.isabs(self.directory):
directory = self.directory
else:
directory = os.path.join(current_app.root_path, self.directory)
cache_timeout = current_app.get_send_file_max_age(filename)
return send_from_directory(directory, filename,
cache_timeout=cache_timeout)
bp_documents.add_url_rule('/images/<path:filename>',
view_func=StaticFiles.as_view('show_image',
'content/images'))
bp_documents.add_url_rule('/documents/<path:filename>',
view_func=StaticFiles.as_view('show_document',
'content/documents'))
|
Implement generic static file view
|
Implement generic static file view
Use the send_from_directory helper function from Flask which is used
to implement the static endpoint of Flask for our custom files
|
Python
|
mit
|
MarauderXtreme/sipa,agdsn/sipa,lukasjuhrich/sipa,agdsn/sipa,MarauderXtreme/sipa,lukasjuhrich/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort
from os.path import isfile, realpath, join
bp_documents = Blueprint('documents', __name__)
@bp_documents.route('/images/<image>')
def show_image(image):
print("Trying to show image {}".format(image))
filename = realpath("content/images/{}".format(image))
print("filename: {}".format(filename))
if not isfile(filename):
print("aborting")
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
@bp_documents.route('/documents/<path:document>')
def show_pdf(document):
filename = join(realpath("content/documents/"), document)
if not isfile(filename):
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
Implement generic static file view
Use the send_from_directory helper function from Flask which is used
to implement the static endpoint of Flask for our custom files
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort, send_from_directory, current_app
from os.path import isfile, realpath, join
from flask.views import View
import os
bp_documents = Blueprint('documents', __name__)
class StaticFiles(View):
def __init__(self, directory):
self.directory = directory
def dispatch_request(self, filename):
if os.path.isabs(self.directory):
directory = self.directory
else:
directory = os.path.join(current_app.root_path, self.directory)
cache_timeout = current_app.get_send_file_max_age(filename)
return send_from_directory(directory, filename,
cache_timeout=cache_timeout)
bp_documents.add_url_rule('/images/<path:filename>',
view_func=StaticFiles.as_view('show_image',
'content/images'))
bp_documents.add_url_rule('/documents/<path:filename>',
view_func=StaticFiles.as_view('show_document',
'content/documents'))
|
<commit_before># -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort
from os.path import isfile, realpath, join
bp_documents = Blueprint('documents', __name__)
@bp_documents.route('/images/<image>')
def show_image(image):
print("Trying to show image {}".format(image))
filename = realpath("content/images/{}".format(image))
print("filename: {}".format(filename))
if not isfile(filename):
print("aborting")
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
@bp_documents.route('/documents/<path:document>')
def show_pdf(document):
filename = join(realpath("content/documents/"), document)
if not isfile(filename):
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
<commit_msg>Implement generic static file view
Use the send_from_directory helper function from Flask which is used
to implement the static endpoint of Flask for our custom files<commit_after>
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort, send_from_directory, current_app
from os.path import isfile, realpath, join
from flask.views import View
import os
bp_documents = Blueprint('documents', __name__)
class StaticFiles(View):
def __init__(self, directory):
self.directory = directory
def dispatch_request(self, filename):
if os.path.isabs(self.directory):
directory = self.directory
else:
directory = os.path.join(current_app.root_path, self.directory)
cache_timeout = current_app.get_send_file_max_age(filename)
return send_from_directory(directory, filename,
cache_timeout=cache_timeout)
bp_documents.add_url_rule('/images/<path:filename>',
view_func=StaticFiles.as_view('show_image',
'content/images'))
bp_documents.add_url_rule('/documents/<path:filename>',
view_func=StaticFiles.as_view('show_document',
'content/documents'))
|
# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort
from os.path import isfile, realpath, join
bp_documents = Blueprint('documents', __name__)
@bp_documents.route('/images/<image>')
def show_image(image):
print("Trying to show image {}".format(image))
filename = realpath("content/images/{}".format(image))
print("filename: {}".format(filename))
if not isfile(filename):
print("aborting")
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
@bp_documents.route('/documents/<path:document>')
def show_pdf(document):
filename = join(realpath("content/documents/"), document)
if not isfile(filename):
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
Implement generic static file view
Use the send_from_directory helper function from Flask which is used
to implement the static endpoint of Flask for our custom files# -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort, send_from_directory, current_app
from os.path import isfile, realpath, join
from flask.views import View
import os
bp_documents = Blueprint('documents', __name__)
class StaticFiles(View):
def __init__(self, directory):
self.directory = directory
def dispatch_request(self, filename):
if os.path.isabs(self.directory):
directory = self.directory
else:
directory = os.path.join(current_app.root_path, self.directory)
cache_timeout = current_app.get_send_file_max_age(filename)
return send_from_directory(directory, filename,
cache_timeout=cache_timeout)
bp_documents.add_url_rule('/images/<path:filename>',
view_func=StaticFiles.as_view('show_image',
'content/images'))
bp_documents.add_url_rule('/documents/<path:filename>',
view_func=StaticFiles.as_view('show_document',
'content/documents'))
|
<commit_before># -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort
from os.path import isfile, realpath, join
bp_documents = Blueprint('documents', __name__)
@bp_documents.route('/images/<image>')
def show_image(image):
print("Trying to show image {}".format(image))
filename = realpath("content/images/{}".format(image))
print("filename: {}".format(filename))
if not isfile(filename):
print("aborting")
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
@bp_documents.route('/documents/<path:document>')
def show_pdf(document):
filename = join(realpath("content/documents/"), document)
if not isfile(filename):
abort(404)
try:
return send_file(filename)
except IOError:
abort(404)
<commit_msg>Implement generic static file view
Use the send_from_directory helper function from Flask which is used
to implement the static endpoint of Flask for our custom files<commit_after># -*- coding: utf-8 -*-
from flask import Blueprint, send_file, abort, send_from_directory, current_app
from os.path import isfile, realpath, join
from flask.views import View
import os
bp_documents = Blueprint('documents', __name__)
class StaticFiles(View):
def __init__(self, directory):
self.directory = directory
def dispatch_request(self, filename):
if os.path.isabs(self.directory):
directory = self.directory
else:
directory = os.path.join(current_app.root_path, self.directory)
cache_timeout = current_app.get_send_file_max_age(filename)
return send_from_directory(directory, filename,
cache_timeout=cache_timeout)
bp_documents.add_url_rule('/images/<path:filename>',
view_func=StaticFiles.as_view('show_image',
'content/images'))
bp_documents.add_url_rule('/documents/<path:filename>',
view_func=StaticFiles.as_view('show_document',
'content/documents'))
|
ce13dd0fd049782531e939da9a5238a6f5493b8d
|
mpld3/test_plots/test_date_ticks.py
|
mpld3/test_plots/test_date_ticks.py
|
"""Plot to test custom date axis tick locations and labels"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
"""
Plot to test custom date axis tick locations and labels
NOTE (@vladh): We may see different behaviour in mpld3 vs d3 for the y axis, because we never
specified exactly how we want the y axis formatted. This is ok.
"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
Add note to misleading test
|
Add note to misleading test
|
Python
|
bsd-3-clause
|
jakevdp/mpld3,mpld3/mpld3,jakevdp/mpld3,mpld3/mpld3
|
"""Plot to test custom date axis tick locations and labels"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
Add note to misleading test
|
"""
Plot to test custom date axis tick locations and labels
NOTE (@vladh): We may see different behaviour in mpld3 vs d3 for the y axis, because we never
specified exactly how we want the y axis formatted. This is ok.
"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
<commit_before>"""Plot to test custom date axis tick locations and labels"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
<commit_msg>Add note to misleading test<commit_after>
|
"""
Plot to test custom date axis tick locations and labels
NOTE (@vladh): We may see different behaviour in mpld3 vs d3 for the y axis, because we never
specified exactly how we want the y axis formatted. This is ok.
"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
"""Plot to test custom date axis tick locations and labels"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
Add note to misleading test"""
Plot to test custom date axis tick locations and labels
NOTE (@vladh): We may see different behaviour in mpld3 vs d3 for the y axis, because we never
specified exactly how we want the y axis formatted. This is ok.
"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
<commit_before>"""Plot to test custom date axis tick locations and labels"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
<commit_msg>Add note to misleading test<commit_after>"""
Plot to test custom date axis tick locations and labels
NOTE (@vladh): We may see different behaviour in mpld3 vs d3 for the y axis, because we never
specified exactly how we want the y axis formatted. This is ok.
"""
from datetime import datetime
import matplotlib.pyplot as plt
import mpld3
def create_plot():
times = [datetime(2013, 12, i) for i in range(1, 20)]
ticks = [times[0], times[1], times[2], times[6], times[-2], times[-1]]
labels = [t.strftime("%Y-%m-%d") for t in ticks]
plt.plot_date(times, times, xdate=True, ydate=True)
plt.xticks(ticks, labels)
plt.yticks(ticks)
plt.xlim([times[1], times[-2]])
plt.ylim([times[1], times[-2]])
return plt.gcf()
def test_date():
fig = create_plot()
_ = mpld3.fig_to_html(fig)
plt.close(fig)
if __name__ == "__main__":
mpld3.show(create_plot(), template_type='simple')
|
c029905a8ffad7fcf7ef70591dd0ad3f72365c09
|
wagtail_uplift/wagtail_hooks.py
|
wagtail_uplift/wagtail_hooks.py
|
from django.conf.urls import url
from django.utils.html import format_html
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
import django
from django.conf.urls import url
from django.utils.html import format_html
if django.VERSION[0] == "2":
from django.contrib.staticfiles.templatetags.staticfiles import static
elif django.VERSION[0] == "3":
from django.templatetags.static import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
Update static import to support Django 3
|
Update static import to support Django 3
|
Python
|
bsd-3-clause
|
l1f7/wagtail_uplift,l1f7/wagtail_uplift,l1f7/wagtail_uplift
|
from django.conf.urls import url
from django.utils.html import format_html
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
Update static import to support Django 3
|
import django
from django.conf.urls import url
from django.utils.html import format_html
if django.VERSION[0] == "2":
from django.contrib.staticfiles.templatetags.staticfiles import static
elif django.VERSION[0] == "3":
from django.templatetags.static import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
<commit_before>from django.conf.urls import url
from django.utils.html import format_html
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
<commit_msg>Update static import to support Django 3<commit_after>
|
import django
from django.conf.urls import url
from django.utils.html import format_html
if django.VERSION[0] == "2":
from django.contrib.staticfiles.templatetags.staticfiles import static
elif django.VERSION[0] == "3":
from django.templatetags.static import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
from django.conf.urls import url
from django.utils.html import format_html
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
Update static import to support Django 3import django
from django.conf.urls import url
from django.utils.html import format_html
if django.VERSION[0] == "2":
from django.contrib.staticfiles.templatetags.staticfiles import static
elif django.VERSION[0] == "3":
from django.templatetags.static import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
<commit_before>from django.conf.urls import url
from django.utils.html import format_html
from django.contrib.staticfiles.templatetags.staticfiles import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
<commit_msg>Update static import to support Django 3<commit_after>import django
from django.conf.urls import url
from django.utils.html import format_html
if django.VERSION[0] == "2":
from django.contrib.staticfiles.templatetags.staticfiles import static
elif django.VERSION[0] == "3":
from django.templatetags.static import static
from wagtail.core import hooks
from wagtail.admin.menu import MenuItem
@hooks.register('insert_global_admin_css')
def global_admin_css():
html = '<link rel="stylesheet" href="{path}">'.format(path=static('css/wagtail_uplift.min.css'))
return format_html(html)
@hooks.register('register_admin_menu_item')
def register_tile_menu_item():
return MenuItem(
'Pages',
'/cmsadmin/pages/',
classnames='icon icon-folder-open-1',
order=1)
@hooks.register('construct_main_menu')
def hide_explorer_menu_item(request, menu_items):
menu_items[:] = [item for item in menu_items if item.name != 'explorer']
|
5657dd437af76ecccdb671a1a09a4c6f9874aab0
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
Check if epages6 settings are configured
|
Check if epages6 settings are configured
|
Python
|
mit
|
ePages-rnd/SublimeLinter-contrib-perl-epages6
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
Check if epages6 settings are configured
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
<commit_msg>Check if epages6 settings are configured<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
Check if epages6 settings are configured#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
<commit_msg>Check if epages6 settings are configured<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the PerlEpages6 plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class PerlEpages6(Linter):
"""Provides an interface to perl on an epages6 virtual machine from a local machine.
Requires a configured copy of the Epages6 plugin (see https://github.com/ePages-rnd/sublimetext-epages6)."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('modernperl', 'perl')
regex = r'(?P<message>.+?) at .+? line (?P<line>\d+)(, near "(?P<near>.+?)")?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'pm'
|
46ba4f97d3ad2d673e8f3acb86d8c75905bc319f
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
syntax = ('pug', 'jade')
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {'--reporter=': 'inline'}
default_type = highlight.WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter
|
Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter
|
Python
|
mit
|
benedfit/SublimeLinter-contrib-pug-lint,benedfit/SublimeLinter-contrib-jade-lint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
syntax = ('pug', 'jade')
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {'--reporter=': 'inline'}
default_type = highlight.WARNING
Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
syntax = ('pug', 'jade')
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {'--reporter=': 'inline'}
default_type = highlight.WARNING
<commit_msg>Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
syntax = ('pug', 'jade')
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {'--reporter=': 'inline'}
default_type = highlight.WARNING
Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
syntax = ('pug', 'jade')
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {'--reporter=': 'inline'}
default_type = highlight.WARNING
<commit_msg>Move attribute to "selector" in defaults from "syntax", as suggested by SublimeLinter<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ben Edwards
# Copyright (c) 2015 Ben Edwards
#
# License: MIT
#
"""This module exports the PugLint plugin class."""
from SublimeLinter.lint import NodeLinter, util, highlight
class PugLint(NodeLinter):
"""Provides an interface to pug-lint."""
npm_name = 'pug-lint'
cmd = 'pug-lint @ *'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.1.1'
regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)'
multiline = False
tempfile_suffix = 'pug'
error_stream = util.STREAM_BOTH
config_file = ('--config', '.pug-lintrc', '.pug-lint.json', '.jade-lintrc', '.jade-lint.json', '~')
defaults = {
'selector': 'text.pug, source.pypug, text.jade',
'--reporter=': 'inline'
}
default_type = highlight.WARNING
|
4bf01c350744e8cbf00750ec85d825f22e06dd29
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
Handle new sublime syntax: bash
|
Handle new sublime syntax: bash
|
Python
|
mit
|
SublimeLinter/SublimeLinter-shellcheck
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
Handle new sublime syntax: bash
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
<commit_msg>Handle new sublime syntax: bash<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
Handle new sublime syntax: bash#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = 'shell-unix-generic'
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
<commit_msg>Handle new sublime syntax: bash<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""
This module exports the Shellcheck plugin class.
Example output with --format gcc
-:230:7: warning: Quote this to prevent word splitting. [SC2046]
-:230:7: note: Useless echo? Instead of 'echo $(cmd)', just use 'cmd'. [SC2005]
-:230:158: note: Double quote to prevent globbing and word splitting. [SC2086]
-:234:10: error: Add double quotes around ${VAR[@]}, otherwise it's just like $* and breaks on spaces. [SC2068]
"""
from SublimeLinter.lint import Linter
class Shellcheck(Linter):
"""Provides an interface to shellcheck."""
syntax = ('shell-unix-generic', 'bash')
cmd = 'shellcheck --format gcc -'
regex = (
r'^.+?:(?P<line>\d+):(?P<col>\d+): '
r'(?:(?P<error>error)|(?P<warning>(warning|note))): '
r'(?P<message>.+)$'
)
defaults = {
'--exclude=,': ''
}
inline_overrides = 'exclude'
comment_re = r'\s*#'
|
1fec4c084e4d96d66245aaf90882047857724b90
|
functional_tests/pages/user_bets.py
|
functional_tests/pages/user_bets.py
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('div.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('tr.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
Fix ft for my bets page
|
Fix ft for my bets page
|
Python
|
mit
|
asyler/betleague,asyler/betleague,asyler/betleague
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('div.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
Fix ft for my bets page
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('tr.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
<commit_before>class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('div.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
<commit_msg>Fix ft for my bets page<commit_after>
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('tr.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('div.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
Fix ft for my bets pageclass UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('tr.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
<commit_before>class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('div.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
<commit_msg>Fix ft for my bets page<commit_after>class UserBetsPage(object):
def __init__(self, test):
self.test = test
self.url = self.test.live_server_url + '/my_bets'
def go(self):
self.test.browser.get(self.url)
def get_matches(self):
return self.test.browser \
.find_elements_by_css_selector('tr.match')
def get_match_body(self, match):
return match.find_element_by_class_name('bet').text
def get_match_input(self, match):
return match.find_element_by_tag_name('input')
def press_save_button(self):
self.test.browser.find_element_by_id('save_bets').click()
def get_match_error(self, match):
return match.find_element_by_css_selector('div.error').text
|
d8ae3ab5f6baf0ee965548f8df37e1a4b331a8aa
|
install_all_addons.py
|
install_all_addons.py
|
import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
Update install script with full file paths
|
Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.
|
Python
|
mit
|
TactileUniverse/3D-Printed-Galaxy-Software
|
import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.
|
import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
<commit_before>import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
<commit_msg>Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.<commit_after>
|
import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
<commit_before>import bpy
# install and activate `emboss plane`
bpy.ops.wm.addon_install(filepath='emboss_plane.py')
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
bpy.ops.wm.addon_install(filepath='name_plate.py')
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
<commit_msg>Update install script with full file paths
This is needed to make the script run on Windows. The `os` package is
used to make sure it will run under any OS.<commit_after>import bpy
import os
# get current directory
current_dir = os.getcwd()
# install and activate `emboss plane`
emboss_plane_filepath = os.path.join(current_dir, 'emboss_plane.py')
bpy.ops.wm.addon_install(filepath=emboss_plane_filepath)
bpy.ops.wm.addon_enable(module='emboss_plane')
# install and activate `name plate`
name_plate_filepath = os.path.join(current_dir, 'name_plate.py')
bpy.ops.wm.addon_install(filepath=name_plate_filepath)
bpy.ops.wm.addon_enable(module='name_plate')
# save user preferences
bpy.ops.wm.save_userpref()
|
d0136d302524ff08e33ebdbab835b499aeeb2c2c
|
kboard/board/models.py
|
kboard/board/models.py
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.board.slug, self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
Fix 'get_absolute_url()' refer to url
|
Fix 'get_absolute_url()' refer to url
|
Python
|
mit
|
hyesun03/k-board,guswnsxodlf/k-board,hyesun03/k-board,darjeeling/k-board,kboard/kboard,cjh5414/kboard,cjh5414/kboard,guswnsxodlf/k-board,hyesun03/k-board,cjh5414/kboard,guswnsxodlf/k-board,kboard/kboard,kboard/kboard
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
Fix 'get_absolute_url()' refer to url
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.board.slug, self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
<commit_before>from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
<commit_msg>Fix 'get_absolute_url()' refer to url<commit_after>
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.board.slug, self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
Fix 'get_absolute_url()' refer to urlfrom django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.board.slug, self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
<commit_before>from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
<commit_msg>Fix 'get_absolute_url()' refer to url<commit_after>from django.db import models
from django.core.urlresolvers import reverse
from django_summernote import models as summer_model
from django_summernote import fields as summer_fields
class Board(models.Model):
def get_absolute_url(self):
return reverse('board:post_list', args=[self.id])
slug = models.TextField(default='', unique=True)
name = models.TextField(default='')
class Post(models.Model):
def get_absolute_url(self):
return reverse('board:view_post', args=[self.board.slug, self.id])
title = models.TextField(default='')
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
class Meta:
index_together = [
["title", "content"],
]
class SummerNote(summer_model.Attachment):
summer_field = summer_fields.SummernoteTextField(default='')
class Comment(models.Model):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
|
4f4ba39bf2d270ef1cb34afe1a5ebe7816d448b7
|
manage.py
|
manage.py
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(make_shell)
script.run()
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')
action_shell = script.make_shell(make_shell)
script.run()
|
Set hostname to '' so the server binds to all interfaces.
|
Set hostname to '' so the server binds to all interfaces.
|
Python
|
mit
|
kurtraschke/cadors-parse,kurtraschke/cadors-parse
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(make_shell)
script.run()
Set hostname to '' so the server binds to all interfaces.
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')
action_shell = script.make_shell(make_shell)
script.run()
|
<commit_before>#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(make_shell)
script.run()
<commit_msg>Set hostname to '' so the server binds to all interfaces.<commit_after>
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')
action_shell = script.make_shell(make_shell)
script.run()
|
#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(make_shell)
script.run()
Set hostname to '' so the server binds to all interfaces.#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')
action_shell = script.make_shell(make_shell)
script.run()
|
<commit_before>#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(make_shell)
script.run()
<commit_msg>Set hostname to '' so the server binds to all interfaces.<commit_after>#!/usr/bin/env python
from werkzeug import script
def make_app():
from cadorsfeed.application import CadorsFeed
return CadorsFeed()
def make_shell():
from cadorsfeed import utils
application = make_app()
return locals()
action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')
action_shell = script.make_shell(make_shell)
script.run()
|
d4dd0fe826fb187b40e807417092118b40f23517
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Use production settings as default
|
Use production settings as default
|
Python
|
bsd-3-clause
|
andrijan/csgostats,andrijan/csgostats,andrijan/csgostats,andrijan/csgostats
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use production settings as default
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use production settings as default<commit_after>
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Use production settings as default#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Use production settings as default<commit_after>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
7668ba1b467e2c48719fc6e3a53932ec1bfb9d18
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
print 'cdecimal'
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Remove the cdecimal debug print
|
Remove the cdecimal debug print
|
Python
|
bsd-2-clause
|
cmptrgeekken/evething,madcowfred/evething,cmptrgeekken/evething,Gillingham/evething,Gillingham/evething,Gillingham/evething,cmptrgeekken/evething,madcowfred/evething,madcowfred/evething,madcowfred/evething,Gillingham/evething,cmptrgeekken/evething,cmptrgeekken/evething
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
print 'cdecimal'
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Remove the cdecimal debug print
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
print 'cdecimal'
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Remove the cdecimal debug print<commit_after>
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
print 'cdecimal'
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Remove the cdecimal debug print#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
print 'cdecimal'
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Remove the cdecimal debug print<commit_after>#!/usr/bin/env python
import os
import sys
# try using cdecimal for faster Decimal type
try:
import cdecimal
except ImportError:
pass
else:
sys.modules["decimal"] = cdecimal
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "evething.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
399430076227f42f5d168c5b2264933c32f4b52a
|
lib/ansible/release.py
|
lib/ansible/release.py
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
Update ansible version number to 2.8.0.dev0
|
Update ansible version number to 2.8.0.dev0
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
Update ansible version number to 2.8.0.dev0
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
<commit_before># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
<commit_msg>Update ansible version number to 2.8.0.dev0<commit_after>
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
Update ansible version number to 2.8.0.dev0# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
<commit_before># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.7.0.a1.post0'
__author__ = 'Ansible, Inc.'
__codename__ = 'In the Light'
<commit_msg>Update ansible version number to 2.8.0.dev0<commit_after># (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
__version__ = '2.8.0.dev0'
__author__ = 'Ansible, Inc.'
__codename__ = 'TBD'
|
4a1976c6aa21f519825c527c795e60dffa7f46db
|
githubsetupircnotifications.py
|
githubsetupircnotifications.py
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
Print message if signing in failed
|
Print message if signing in failed
|
Python
|
mit
|
kragniz/github-setup-irc-notifications
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
Print message if signing in failed
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
<commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
<commit_msg>Print message if signing in failed<commit_after>
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
Print message if signing in failed"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
<commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
<commit_msg>Print message if signing in failed<commit_after>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
21b1206da978434e388e43a5258b9c0f09fc0e1e
|
tumblr/data/cleanup.py
|
tumblr/data/cleanup.py
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
Add suffixes to all gifs
|
Add suffixes to all gifs
|
Python
|
mit
|
albertyw/devops-reactions-index,albertyw/devops-reactions-index,albertyw/reaction-pics,albertyw/reaction-pics,albertyw/reaction-pics,albertyw/devops-reactions-index,albertyw/devops-reactions-index,albertyw/reaction-pics
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
Add suffixes to all gifs
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
<commit_before>import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
<commit_msg>Add suffixes to all gifs<commit_after>
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
Add suffixes to all gifsimport os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
<commit_before>import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
del lines[x]
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if size != 0:
continue
print(image)
remove_image_from_csvs(image)
os.remove(path)
<commit_msg>Add suffixes to all gifs<commit_after>import os
def remove_image_from_csvs(image):
for csv in os.listdir("."):
if csv[-4:] != ".csv":
continue
with open(csv, "r") as h:
lines = h.readlines()
modified = False
for x in range(len(lines)):
if image in lines[x]:
lines[x] = lines[x].replace(image, image+".gif")
modified = True
print(lines[x])
if not modified:
continue
with open(csv, "w") as h:
h.write("".join(lines))
images = os.listdir("static")
for image in images:
if image == ".gitkeep":
continue
path = os.path.join("static", image)
size = os.path.getsize(path)
if "." in image:
continue
print(image)
remove_image_from_csvs(image)
os.rename(path, path+".gif")
|
ecd43e2d3679759d2ee389b35752cb8db18c5b22
|
microdrop/microdrop.py
|
microdrop/microdrop.py
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import update
if __name__ == '__main__':
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import utility
import update
if __name__ == '__main__':
# Change directory to where microdrop.py resides, so this program can be
# run from any directory.
os.chdir(utility.base_path())
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
Change dir to allow script to be run from anywhere
|
Change dir to allow script to be run from anywhere
|
Python
|
bsd-3-clause
|
wheeler-microfluidics/microdrop
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import update
if __name__ == '__main__':
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()Change dir to allow script to be run from anywhere
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import utility
import update
if __name__ == '__main__':
# Change directory to where microdrop.py resides, so this program can be
# run from any directory.
os.chdir(utility.base_path())
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
<commit_before>"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import update
if __name__ == '__main__':
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()<commit_msg>Change dir to allow script to be run from anywhere<commit_after>
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import utility
import update
if __name__ == '__main__':
# Change directory to where microdrop.py resides, so this program can be
# run from any directory.
os.chdir(utility.base_path())
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import update
if __name__ == '__main__':
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()Change dir to allow script to be run from anywhere"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import utility
import update
if __name__ == '__main__':
# Change directory to where microdrop.py resides, so this program can be
# run from any directory.
os.chdir(utility.base_path())
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
<commit_before>"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import update
if __name__ == '__main__':
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()<commit_msg>Change dir to allow script to be run from anywhere<commit_after>"""
Copyright 2011 Ryan Fobel
This file is part of Microdrop.
Microdrop is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
Foundation, either version 3 of the License, or
(at your option) any later version.
Microdrop is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Microdrop. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import utility
import update
if __name__ == '__main__':
# Change directory to where microdrop.py resides, so this program can be
# run from any directory.
os.chdir(utility.base_path())
archive_version = update.archive_version()
driver_version = update.package_version()
firmware_version = update.firmware_version()
print "archive version=", archive_version
print "driver_version=", driver_version
print "firmware_version=", firmware_version
if driver_version != archive_version:
print "updating driver to version %s..." % archive_version
if update.update_package():
print " success"
else:
print " failed"
if firmware_version != archive_version:
print "updating firmware to version %s..." % archive_version
if update.update_firmware():
print " success"
else:
print " failed"
from app import App
app = App()
|
78ec7d5336eb65ff845da7ea9f93d34b402f5a0f
|
ironic/drivers/drac.py
|
ironic/drivers/drac.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
Add the PXE VendorPassthru interface to PXEDracDriver
|
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API the DRAC it can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
|
Python
|
apache-2.0
|
NaohiroTamura/ironic,SauloAislan/ironic,openstack/ironic,pshchelo/ironic,naototty/vagrant-lxc-ironic,ramineni/myironic,bacaldwell/ironic,openstack/ironic,rackerlabs/ironic,froyobin/ironic,rdo-management/ironic,pshchelo/ironic,ionutbalutoiu/ironic,Tan0/ironic,hpproliant/ironic,supermari0/ironic,dims/ironic,naototty/vagrant-lxc-ironic,NaohiroTamura/ironic,naterh/ironic,devananda/ironic,Tehsmash/ironic,ionutbalutoiu/ironic,SauloAislan/ironic,dims/ironic,redhat-openstack/ironic,bacaldwell/ironic
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API the DRAC it can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
<commit_msg>Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API the DRAC it can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API the DRAC it can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
<commit_msg>Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API the DRAC it can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
fa20d5b6a9b636fec7fc542cf899bf86c00dd8de
|
bakery/static_urls.py
|
bakery/static_urls.py
|
from django.conf import settings
from django.conf.urls import patterns, url
urlpatterns = patterns(
"bakery.static_views",
url(r"^(.*)$", "serve", {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
)
|
from django.conf import settings
from django.conf.urls import url
from bakery.static_views import serve
urlpatterns = [
url(r"^(.*)$", serve, {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
]
|
Upgrade to Django 1.10 style url patterns
|
Upgrade to Django 1.10 style url patterns
|
Python
|
mit
|
datadesk/django-bakery,stvkas/django-bakery,stvkas/django-bakery,datadesk/django-bakery,datadesk/django-bakery,stvkas/django-bakery
|
from django.conf import settings
from django.conf.urls import patterns, url
urlpatterns = patterns(
"bakery.static_views",
url(r"^(.*)$", "serve", {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
)
Upgrade to Django 1.10 style url patterns
|
from django.conf import settings
from django.conf.urls import url
from bakery.static_views import serve
urlpatterns = [
url(r"^(.*)$", serve, {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url
urlpatterns = patterns(
"bakery.static_views",
url(r"^(.*)$", "serve", {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
)
<commit_msg>Upgrade to Django 1.10 style url patterns<commit_after>
|
from django.conf import settings
from django.conf.urls import url
from bakery.static_views import serve
urlpatterns = [
url(r"^(.*)$", serve, {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
]
|
from django.conf import settings
from django.conf.urls import patterns, url
urlpatterns = patterns(
"bakery.static_views",
url(r"^(.*)$", "serve", {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
)
Upgrade to Django 1.10 style url patternsfrom django.conf import settings
from django.conf.urls import url
from bakery.static_views import serve
urlpatterns = [
url(r"^(.*)$", serve, {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
]
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url
urlpatterns = patterns(
"bakery.static_views",
url(r"^(.*)$", "serve", {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
)
<commit_msg>Upgrade to Django 1.10 style url patterns<commit_after>from django.conf import settings
from django.conf.urls import url
from bakery.static_views import serve
urlpatterns = [
url(r"^(.*)$", serve, {
"document_root": settings.BUILD_DIR,
'show_indexes': True,
'default': 'index.html'
}),
]
|
8d93082178200834a3df1b09d08cd53073eb07fe
|
coverage_diff.py
|
coverage_diff.py
|
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
start_line -= 1
with open(filename+',cover') as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
|
# Gate script: scan a unified diff and fail when changed src/ lines are uncovered.
import os
import re
import sys  # hoisted to module level so report_diffs works outside __main__

# Matches the "+++ b/<path>" file header line of a unified diff.
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
# Matches the "@@ -a,b +start,count @@" hunk range marker of a unified diff.
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')


def report_diffs(diff):
    """Walk a unified diff line by line and exit(1) on the first hunk that
    touches a file under src/ whose changed lines are not covered."""
    filename = None
    for line in diff:
        name_match = filename_matcher.match(line)
        if name_match:
            filename = name_match.group(1)
            continue
        diff_line_match = diff_line_matcher.match(line)
        if diff_line_match:
            start_line = int(diff_line_match.group(1))
            number_of_lines = int(diff_line_match.group(2))
            # Guard against a hunk marker appearing before any file header.
            if filename and filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
                sys.exit(1)


def is_covered(filename, start_line, number_of_lines):
    """Return True iff every line in the 1-based range is marked executed
    ('>') in the "<filename>,cover" annotation file.

    A missing annotation file counts as uncovered.
    """
    cover_file_name = filename + ',cover'
    # BUG FIX: os.path.is_file does not exist (AttributeError at runtime);
    # the correct name is os.path.isfile.
    if not os.path.isfile(cover_file_name):
        return False
    start_line -= 1  # annotation file lines are addressed 0-based
    with open(cover_file_name) as annotation:
        lines = annotation.readlines()[start_line:start_line + number_of_lines]
    for line in lines:
        if not line.startswith('>'):
            # Parenthesized single-argument print is valid in both Python 2 and 3.
            print('Line not covered %r in file "%s"!!' % (line, filename))
            return False
    return True


if __name__ == '__main__':
    # Usage: coverage_diff.py <diff-file>
    diff_file = sys.argv[1]
    with open(diff_file) as diff:
        report_diffs(diff)
    print('All lines covered')
|
Check that coverage file exists
|
Check that coverage file exists
|
Python
|
apache-2.0
|
caio2k/RIDE,fingeronthebutton/RIDE,HelioGuilherme66/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE,fingeronthebutton/RIDE,caio2k/RIDE,robotframework/RIDE,fingeronthebutton/RIDE,robotframework/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,HelioGuilherme66/RIDE,caio2k/RIDE
|
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
start_line -= 1
with open(filename+',cover') as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
Check that coverage file exists
|
import os
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
cover_file_name = filename+',cover'
if not os.path.is_file(cover_file_name):
return False
start_line -= 1
with open(cover_file_name) as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
|
<commit_before>import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
start_line -= 1
with open(filename+',cover') as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
<commit_msg>Check that coverage file exists<commit_after>
|
import os
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
cover_file_name = filename+',cover'
if not os.path.is_file(cover_file_name):
return False
start_line -= 1
with open(cover_file_name) as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
|
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
start_line -= 1
with open(filename+',cover') as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
Check that coverage file existsimport os
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
cover_file_name = filename+',cover'
if not os.path.is_file(cover_file_name):
return False
start_line -= 1
with open(cover_file_name) as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
|
<commit_before>import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
start_line -= 1
with open(filename+',cover') as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
<commit_msg>Check that coverage file exists<commit_after>import os
import re
filename_matcher = re.compile(r'^\+\+\+ b/([\w/\._]+)\s+.+$')
diff_line_matcher = re.compile(r'^@@ -\d+,\d+ \+(\d+),(\d+) @@$')
def report_diffs(diff):
for line in diff:
name_match = filename_matcher.match(line)
if name_match:
filename = name_match.group(1)
continue
diff_line_match = diff_line_matcher.match(line)
if diff_line_match:
start_line = int(diff_line_match.group(1))
number_of_lines = int(diff_line_match.group(2))
if filename.startswith('src') and not is_covered(filename, start_line, number_of_lines):
sys.exit(1)
def is_covered(filename, start_line, number_of_lines):
cover_file_name = filename+',cover'
if not os.path.is_file(cover_file_name):
return False
start_line -= 1
with open(cover_file_name) as annotation:
lines = annotation.readlines()[start_line:start_line+number_of_lines]
for line in lines:
if not line.startswith('>'):
print 'Line not covered %r in file "%s"!!' % (line, filename)
return False
return True
if __name__ == '__main__':
import sys
diff_file = sys.argv[1]
with open(diff_file) as diff:
report_diffs(diff)
print 'All lines covered'
|
3a1005de48a0883853c632b17220f2331bdc7017
|
primes.py
|
primes.py
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math


def is_prime(num):
    """Return True iff num is a prime number."""
    if num < 2:
        return False
    # Take advantage of the speedup gained by only checking up to sqrt(num).
    sqrt = int(math.sqrt(num))
    # BUG FIX: the upper bound must be sqrt + 1 so the divisor sqrt itself
    # is tested; otherwise perfect squares (4, 9, 25, ...) are reported prime.
    for i in range(2, sqrt + 1):
        if num % i == 0:
            return False
    return True


def generate_list_of_primes(min, max):
    """ Given a min and max generate all the primes in that range. """
    assert max > min, "min %s is > than max %s" % (min, max)
    # Comprehension replaces the manual append loop; same order and contents.
    return [x for x in range(min, max) if is_prime(x)]


if __name__ == "__main__":
    # Parenthesized single-argument print is valid in both Python 2 and 3.
    print(generate_list_of_primes(1, 100))
    print(generate_list_of_primes(1000, 9999))
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math


def is_prime(num):
    """Return True iff num is a prime number."""
    if num < 2:
        return False
    # Take advantage of the speedup gained by only checking up to sqrt(num).
    sqrt = int(math.sqrt(num))
    # Use sqrt + 1 so the bound is inclusive: small composites such as 4
    # (whose only non-trivial factor IS its square root) must be rejected.
    for i in range(2, sqrt + 1):
        if num % i == 0:
            return False
    return True


def generate_list_of_primes(min, max):
    """ Given a min and max generate all the primes in that range. """
    assert max > min, "min %s is > than max %s" % (min, max)
    return [x for x in range(min, max) if is_prime(x)]


if __name__ == "__main__":
    # check for some simple prime and not prime numbers
    assert not is_prime(1), "1 should not be prime"
    assert is_prime(5), "5 should be prime"
    assert not is_prime(6), "6 should not be prime"
    # BUG FIX: message previously read "100 should be be prime" — both a
    # typo and the opposite of what the assertion checks.
    assert not is_prime(100), "100 should not be prime"
    assert not is_prime(1000), "1000 should not be prime"
    # check the primes from the problem 49 example
    assert is_prime(1487), "1487 should be prime"
    assert is_prime(4817), "4817 should be prime"
    assert is_prime(8147), "8147 should be prime"
    for num in generate_list_of_primes(1, 100):
        assert is_prime(num), "%s should be prime" % num
    for num in generate_list_of_primes(1000, 9999):
        assert is_prime(num), "%s should be prime" % num
    print("all assertions passed")
|
Add tests and a bug fix found by tests.
|
Add tests and a bug fix found by tests.
Since small numbers might have a sqrt == 2 add +1 to the range used for testing.
|
Python
|
mit
|
smillet15/project-euler
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
for i in xrange(2, sqrt):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
print generate_list_of_primes(1, 100)
print generate_list_of_primes(1000, 9999)
Add tests and a bug fix found by tests.
Since small numbers might have a sqrt == 2 add +1 to the range used for testing.
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
# use sqrt + 1 to handle smaller composites where the sqrt is 2.
# we need an initial case to test fail out as False.
for i in xrange(2, sqrt + 1):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
# check for some simple prime and not prime numbers
assert not is_prime(1), "1 should not be prime"
assert is_prime(5), "5 should be prime"
assert not is_prime(6), "6 should not be prime"
assert not is_prime(100), "100 should be be prime"
assert not is_prime(1000), "1000 should not be prime"
# check the primes from the problem 49 example
assert is_prime(1487), "1487 should be prime"
assert is_prime(4817), "4817 should be prime"
assert is_prime(8147), "8147 should be prime"
for num in generate_list_of_primes(1, 100):
assert is_prime(num), "%s should be prime" % num
for num in generate_list_of_primes(1000, 9999):
assert is_prime(num), "%s should be prime" % num
print "all assertions passed"
|
<commit_before>#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
for i in xrange(2, sqrt):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
print generate_list_of_primes(1, 100)
print generate_list_of_primes(1000, 9999)
<commit_msg>Add tests and a bug fix found by tests.
Since small numbers might have a sqrt == 2 add +1 to the range used for testing.<commit_after>
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
# use sqrt + 1 to handle smaller composites where the sqrt is 2.
# we need an initial case to test fail out as False.
for i in xrange(2, sqrt + 1):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
# check for some simple prime and not prime numbers
assert not is_prime(1), "1 should not be prime"
assert is_prime(5), "5 should be prime"
assert not is_prime(6), "6 should not be prime"
assert not is_prime(100), "100 should be be prime"
assert not is_prime(1000), "1000 should not be prime"
# check the primes from the problem 49 example
assert is_prime(1487), "1487 should be prime"
assert is_prime(4817), "4817 should be prime"
assert is_prime(8147), "8147 should be prime"
for num in generate_list_of_primes(1, 100):
assert is_prime(num), "%s should be prime" % num
for num in generate_list_of_primes(1000, 9999):
assert is_prime(num), "%s should be prime" % num
print "all assertions passed"
|
#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
for i in xrange(2, sqrt):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
print generate_list_of_primes(1, 100)
print generate_list_of_primes(1000, 9999)
Add tests and a bug fix found by tests.
Since small numbers might have a sqrt == 2 add +1 to the range used for testing.#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
# use sqrt + 1 to handle smaller composites where the sqrt is 2.
# we need an initial case to test fail out as False.
for i in xrange(2, sqrt + 1):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
# check for some simple prime and not prime numbers
assert not is_prime(1), "1 should not be prime"
assert is_prime(5), "5 should be prime"
assert not is_prime(6), "6 should not be prime"
assert not is_prime(100), "100 should be be prime"
assert not is_prime(1000), "1000 should not be prime"
# check the primes from the problem 49 example
assert is_prime(1487), "1487 should be prime"
assert is_prime(4817), "4817 should be prime"
assert is_prime(8147), "8147 should be prime"
for num in generate_list_of_primes(1, 100):
assert is_prime(num), "%s should be prime" % num
for num in generate_list_of_primes(1000, 9999):
assert is_prime(num), "%s should be prime" % num
print "all assertions passed"
|
<commit_before>#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
for i in xrange(2, sqrt):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
print generate_list_of_primes(1, 100)
print generate_list_of_primes(1000, 9999)
<commit_msg>Add tests and a bug fix found by tests.
Since small numbers might have a sqrt == 2 add +1 to the range used for testing.<commit_after>#!/usr/bin/env python
""" Tools for checking and generating prime numbers. """
import math
def is_prime(num):
""" Test if a number is prime. """
if num < 2:
return False
# take advantage of the speedup gained by only checking the sqrt
sqrt = int(math.sqrt(num))
# use xrange to generate the list as we iterate
# use sqrt + 1 to handle smaller composites where the sqrt is 2.
# we need an initial case to test fail out as False.
for i in xrange(2, sqrt + 1):
if num % i == 0:
return False
return True
def generate_list_of_primes(min, max):
""" Given a min and max generate all the primes in that range. """
assert max > min, "min %s is > than max %s" % (min, max)
primes = []
for x in xrange(min, max):
if is_prime(x):
primes.append(x)
return primes
if __name__ == "__main__":
# check for some simple prime and not prime numbers
assert not is_prime(1), "1 should not be prime"
assert is_prime(5), "5 should be prime"
assert not is_prime(6), "6 should not be prime"
assert not is_prime(100), "100 should be be prime"
assert not is_prime(1000), "1000 should not be prime"
# check the primes from the problem 49 example
assert is_prime(1487), "1487 should be prime"
assert is_prime(4817), "4817 should be prime"
assert is_prime(8147), "8147 should be prime"
for num in generate_list_of_primes(1, 100):
assert is_prime(num), "%s should be prime" % num
for num in generate_list_of_primes(1000, 9999):
assert is_prime(num), "%s should be prime" % num
print "all assertions passed"
|
e4c5ff6901fe7652c7f76f67189058de76406406
|
casepro/cases/forms.py
|
casepro/cases/forms.py
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class PartnerUpdateForm(forms.ModelForm):
    # Form for editing an existing Partner. Label choices are populated
    # per-organization in __init__.
    labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
                                            queryset=Label.objects.none(),
                                            widget=forms.CheckboxSelectMultiple(),
                                            required=False)
    def __init__(self, *args, **kwargs):
        # 'org' is a required keyword argument supplied by the caller; it
        # scopes the selectable labels below.
        org = kwargs.pop('org')
        super(PartnerUpdateForm, self).__init__(*args, **kwargs)
        # NOTE(review): assumes an 'instance' kwarg is always provided —
        # a missing instance raises KeyError here; confirm at call sites.
        self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
        self.fields['labels'].queryset = Label.get_all(org).order_by('name')
    class Meta:
        model = Partner
        fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(forms.ModelForm):
    # Form for creating a new Partner; mirrors PartnerUpdateForm but has
    # no primary_contact field.
    labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
                                            queryset=Label.objects.none(),
                                            widget=forms.CheckboxSelectMultiple(),
                                            required=False)
    def __init__(self, *args, **kwargs):
        # Same 'org' keyword contract as PartnerUpdateForm.
        org = kwargs.pop('org')
        super(PartnerCreateForm, self).__init__(*args, **kwargs)
        self.fields['labels'].queryset = Label.get_all(org).order_by('name')
    class Meta:
        model = Partner
        fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner


class BasePartnerForm(forms.ModelForm):
    """Shared fields and initialization for partner create/update forms."""

    description = forms.CharField(label=_("Description"), max_length=255, required=False, widget=forms.Textarea)

    labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
                                            queryset=Label.objects.none(),
                                            widget=forms.CheckboxSelectMultiple(),
                                            required=False)

    def __init__(self, *args, **kwargs):
        # 'org' is a required keyword supplied by the caller; it scopes the
        # selectable labels to that organization.
        org = kwargs.pop('org')
        super(BasePartnerForm, self).__init__(*args, **kwargs)
        self.fields['labels'].queryset = Label.get_all(org).order_by('name')


class PartnerUpdateForm(BasePartnerForm):
    def __init__(self, *args, **kwargs):
        super(PartnerUpdateForm, self).__init__(*args, **kwargs)
        # Only users already belonging to this partner may be primary contact.
        # NOTE(review): assumes an 'instance' kwarg is always provided —
        # a missing instance raises KeyError here; confirm at call sites.
        self.fields['primary_contact'].queryset = kwargs['instance'].get_users()

    class Meta:
        model = Partner
        fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')


class PartnerCreateForm(BasePartnerForm):
    # The redundant __init__ override that only delegated to super() was
    # removed; BasePartnerForm.__init__ already performs all initialization.

    class Meta:
        model = Partner
        fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
Tweak partner form to use textarea for description
|
Tweak partner form to use textarea for description
|
Python
|
bsd-3-clause
|
praekelt/casepro,xkmato/casepro,rapidpro/casepro,praekelt/casepro,rapidpro/casepro,rapidpro/casepro,praekelt/casepro,xkmato/casepro
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class PartnerUpdateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerCreateForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
Tweak partner form to use textarea for description
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class BasePartnerForm(forms.ModelForm):
description = forms.CharField(label=_("Description"), max_length=255, required=False, widget=forms.Textarea)
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(BasePartnerForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class PartnerUpdateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerCreateForm, self).__init__(*args, **kwargs)
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
<commit_before>from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class PartnerUpdateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerCreateForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
<commit_msg>Tweak partner form to use textarea for description<commit_after>
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class BasePartnerForm(forms.ModelForm):
description = forms.CharField(label=_("Description"), max_length=255, required=False, widget=forms.Textarea)
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(BasePartnerForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class PartnerUpdateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerCreateForm, self).__init__(*args, **kwargs)
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class PartnerUpdateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerCreateForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
Tweak partner form to use textarea for descriptionfrom __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class BasePartnerForm(forms.ModelForm):
description = forms.CharField(label=_("Description"), max_length=255, required=False, widget=forms.Textarea)
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(BasePartnerForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class PartnerUpdateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerCreateForm, self).__init__(*args, **kwargs)
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
<commit_before>from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class PartnerUpdateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(forms.ModelForm):
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(PartnerCreateForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
<commit_msg>Tweak partner form to use textarea for description<commit_after>from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from casepro.msgs.models import Label
from .models import Partner
class BasePartnerForm(forms.ModelForm):
description = forms.CharField(label=_("Description"), max_length=255, required=False, widget=forms.Textarea)
labels = forms.ModelMultipleChoiceField(label=_("Can Access"),
queryset=Label.objects.none(),
widget=forms.CheckboxSelectMultiple(),
required=False)
def __init__(self, *args, **kwargs):
org = kwargs.pop('org')
super(BasePartnerForm, self).__init__(*args, **kwargs)
self.fields['labels'].queryset = Label.get_all(org).order_by('name')
class PartnerUpdateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerUpdateForm, self).__init__(*args, **kwargs)
self.fields['primary_contact'].queryset = kwargs['instance'].get_users()
class Meta:
model = Partner
fields = ('name', 'description', 'primary_contact', 'logo', 'is_restricted', 'labels')
class PartnerCreateForm(BasePartnerForm):
def __init__(self, *args, **kwargs):
super(PartnerCreateForm, self).__init__(*args, **kwargs)
class Meta:
model = Partner
fields = ('name', 'description', 'logo', 'is_restricted', 'labels')
|
122c833f163aa0ac0a38e91c7b49dca63308db2d
|
sktracker/tracker/solver/solver.py
|
sktracker/tracker/solver/solver.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
    """Base class for trajectory solvers.

    Parameters
    ----------
    trajs : :class:`sktracker.trajectories.Trajectories`
        The trajectories to solve.
    """

    def __init__(self, trajs):
        # Wrap the input so downstream code can rely on the Trajectories API.
        self.trajs = Trajectories(trajs)

    def check_cost_function_type(self, obj, cost_funtion_type):
        """Check whether an object inherits from the expected class.

        Parameters
        ----------
        obj : object
            The cost function instance to check.
        cost_funtion_type : class
            The expected (base) class of ``obj``.

        Raises
        ------
        TypeError
            If ``obj`` is not an instance of ``cost_funtion_type``.
        """
        error_mess = '''The cost function {} doesn't inherit from {}'''
        if not isinstance(obj, cost_funtion_type):
            raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))

    def relabel_trajs(self, new_labels=None):
        """Set the trajectory index `label` to new values.

        Parameters
        ----------
        new_labels : :class:`numpy.ndarray` or None, default None
            The new labels. If not provided, the function will look for a
            column named "new_label" in `trajs` and use this as the new
            label index.
        """
        # BUG FIX: `relabel` is a *bound* method, so the original call
        # `self.trajs.relabel(self, new_labels=...)` passed `self` as an
        # extra positional argument, shifting the parameters by one.
        self.trajs.relabel(new_labels=new_labels)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(new_labels=new_labels)
|
Fix a bug - tests pass
|
Fix a bug - tests pass
|
Python
|
bsd-3-clause
|
bnoi/scikit-tracker,bnoi/scikit-tracker,bnoi/scikit-tracker
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(self, new_labels=new_labels)
Fix a bug - tests pass
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(new_labels=new_labels)
|
<commit_before>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(self, new_labels=new_labels)
<commit_msg>Fix a bug - tests pass<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(new_labels=new_labels)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(self, new_labels=new_labels)
Fix a bug - tests pass
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(new_labels=new_labels)
|
<commit_before>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
"""
Parameters
----------
trajs : :class:`sktracker.trajectories.Trajectories`
The trajectories
"""
def __init__(self, trajs):
self.trajs = Trajectories(trajs)
def check_cost_function_type(self, obj, cost_funtion_type):
"""Check wether an object inherit from another one.
Parameters
----------
obj : object
cost_funtion_type : class name
Raises
------
TypeError : `obj` type does not inherit from `cost_funtion_type`
"""
error_mess = '''The cost function {} doesn't inherit from {}'''
if not isinstance(obj, cost_funtion_type):
raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))
def relabel_trajs(self, new_labels=None):
"""
Sets the trajectory index `label` to new values.
Parameters
----------
new_labels: :class:`numpy.ndarray` or None, default None
The new label. If it is not provided, the function wil look for
will look for a column named "new_label" in `trajs` and use this
as the new label index
"""
self.trajs.relabel(self, new_labels=new_labels)
<commit_msg>Fix a bug - tests pass<commit_after>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from ...trajectories import Trajectories
__all__ = []
class AbstractSolver(object):
    """Base class for trajectory solvers.

    Parameters
    ----------
    trajs : :class:`sktracker.trajectories.Trajectories`
        The trajectories to solve.
    """

    def __init__(self, trajs):
        self.trajs = Trajectories(trajs)

    def check_cost_function_type(self, obj, cost_funtion_type):
        """Check whether ``obj`` is an instance of ``cost_funtion_type``.

        Parameters
        ----------
        obj : object
            The cost function instance to check.
        cost_funtion_type : class
            The expected (base) class of ``obj``.

        Raises
        ------
        TypeError
            If ``obj`` does not inherit from ``cost_funtion_type``.
        """
        # Guard clause: nothing to do when the type matches.
        if isinstance(obj, cost_funtion_type):
            return
        error_mess = '''The cost function {} doesn't inherit from {}'''
        raise TypeError(error_mess.format(obj, cost_funtion_type.__name__))

    def relabel_trajs(self, new_labels=None):
        """Set the trajectory index `label` to new values.

        Parameters
        ----------
        new_labels : :class:`numpy.ndarray` or None, default None
            The new labels. When omitted, a column named "new_label" in
            `trajs` is used as the new label index.
        """
        self.trajs.relabel(new_labels=new_labels)
|
a75c51594e225fbada37f1be23cf2de581da29a4
|
keeper/tasks/dashboardbuild.py
|
keeper/tasks/dashboardbuild.py
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query(id=product_id).one()
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query.get(product_id)
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
|
Fix getting product in build_dashboard task
|
Fix getting product in build_dashboard task
|
Python
|
mit
|
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query(id=product_id).one()
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
Fix getting product in build_dashboard task
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query.get(product_id)
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
|
<commit_before>from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query(id=product_id).one()
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
<commit_msg>Fix getting product in build_dashboard task<commit_after>
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
    """Build a product's dashboard as a Celery task.

    Parameters
    ----------
    product_id : `str`
        Primary key of the product whose dashboard should be rebuilt.
        (The task takes an ID, not a URL, so the serialized task payload
        stays small.)
    """
    # Include the retry count so repeated attempts are visible in worker logs.
    logger.info(
        "Starting dashboard build product_id=%s retry=%d",
        product_id,
        self.request.retries,
    )
    # Fetch by primary key. NOTE(review): assumes the row still exists when
    # the task runs — confirm callers never enqueue a deleted product's ID.
    product = Product.query.get(product_id)
    build_dashboard_svc(product, logger)
    logger.info("Finished triggering dashboard build")
|
from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query(id=product_id).one()
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
Fix getting product in build_dashboard taskfrom __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query.get(product_id)
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
|
<commit_before>from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
    """Build a product's dashboard as a Celery task.

    Parameters
    ----------
    product_id : `str`
        Primary key of the product whose dashboard should be rebuilt.
    """
    # Include the retry count so repeated attempts are visible in worker logs.
    logger.info(
        "Starting dashboard build product_id=%s retry=%d",
        product_id,
        self.request.retries,
    )
    # BUG FIX: `Product.query` is a query property, not a callable —
    # `Product.query(id=product_id).one()` raises at runtime. Fetch by
    # primary key with Query.get() instead.
    product = Product.query.get(product_id)
    build_dashboard_svc(product, logger)
    logger.info("Finished triggering dashboard build")
<commit_msg>Fix getting product in build_dashboard task<commit_after>from __future__ import annotations
from typing import TYPE_CHECKING
from celery.utils.log import get_task_logger
from keeper.celery import celery_app
from keeper.models import Product
from keeper.services.dashboard import build_dashboard as build_dashboard_svc
if TYPE_CHECKING:
import celery.task
__all__ = ["build_dashboard"]
logger = get_task_logger(__name__)
@celery_app.task(bind=True)
def build_dashboard(self: celery.task.Task, product_id: str) -> None:
"""Build a product's dashboard as a Celery task.
Parameters
----------
product_url : `str`
URL of the product resource.
"""
logger.info(
"Starting dashboard build product_id=%s retry=%d",
product_id,
self.request.retries,
)
product = Product.query.get(product_id)
build_dashboard_svc(product, logger)
logger.info("Finished triggering dashboard build")
|
faf1535fcc6c743345485cd388be8979cad3dec2
|
aspen/__main__.py
|
aspen/__main__.py
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
server = make_server('0.0.0.0', 8080, website)
log_dammit("Greetings, program! Welcome to port 8080.")
server.serve_forever()
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
port = int(os.environ.get('PORT', '8080'))
server = make_server('0.0.0.0', port, website)
log_dammit("Greetings, program! Welcome to port {0}.".format(port))
server.serve_forever()
|
Support PORT envvar from `python -m aspen`
|
Support PORT envvar from `python -m aspen`
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
server = make_server('0.0.0.0', 8080, website)
log_dammit("Greetings, program! Welcome to port 8080.")
server.serve_forever()
Support PORT envvar from `python -m aspen`
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
port = int(os.environ.get('PORT', '8080'))
server = make_server('0.0.0.0', port, website)
log_dammit("Greetings, program! Welcome to port {0}.".format(port))
server.serve_forever()
|
<commit_before>"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
server = make_server('0.0.0.0', 8080, website)
log_dammit("Greetings, program! Welcome to port 8080.")
server.serve_forever()
<commit_msg>Support PORT envvar from `python -m aspen`<commit_after>
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
    website = Website()
    # Honour the PORT environment variable (common on PaaS hosts such as
    # Heroku), defaulting to 8080 for local development.
    port = int(os.environ.get('PORT', '8080'))
    server = make_server('0.0.0.0', port, website)
    log_dammit("Greetings, program! Welcome to port {0}.".format(port))
    # Blocks forever serving requests; intended for development/testing only.
    server.serve_forever()
|
"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
server = make_server('0.0.0.0', 8080, website)
log_dammit("Greetings, program! Welcome to port 8080.")
server.serve_forever()
Support PORT envvar from `python -m aspen`"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
port = int(os.environ.get('PORT', '8080'))
server = make_server('0.0.0.0', port, website)
log_dammit("Greetings, program! Welcome to port {0}.".format(port))
server.serve_forever()
|
<commit_before>"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
server = make_server('0.0.0.0', 8080, website)
log_dammit("Greetings, program! Welcome to port 8080.")
server.serve_forever()
<commit_msg>Support PORT envvar from `python -m aspen`<commit_after>"""
python -m aspen
===============
Aspen ships with a server (wsgiref.simple_server) that is
suitable for development and testing. It can be invoked via:
python -m aspen
though even for development you'll likely want to specify a
project root, so a more likely incantation is:
ASPEN_PROJECT_ROOT=/path/to/wherever python -m aspen
For production deployment, you should probably deploy using
a higher performance WSGI server like Gunicorn, uwsgi, Spawning,
or the like.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
from aspen import log_dammit
from aspen.website import Website
from wsgiref.simple_server import make_server
if __name__ == '__main__':
website = Website()
port = int(os.environ.get('PORT', '8080'))
server = make_server('0.0.0.0', port, website)
log_dammit("Greetings, program! Welcome to port {0}.".format(port))
server.serve_forever()
|
c1e6b61b6da9f17f11ce41bbcdaad61fadc075db
|
serenata_toolbox/datasets/remote.py
|
serenata_toolbox/datasets/remote.py
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY'),
'aws_secret_access_key': config('AMAZON_SECRET_KEY'),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
Make Amazon keys non required
|
Make Amazon keys non required
|
Python
|
mit
|
datasciencebr/serenata-toolbox
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY'),
'aws_secret_access_key': config('AMAZON_SECRET_KEY'),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
Make Amazon keys non required
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
<commit_before>import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY'),
'aws_secret_access_key': config('AMAZON_SECRET_KEY'),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
<commit_msg>Make Amazon keys non required<commit_after>
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY'),
'aws_secret_access_key': config('AMAZON_SECRET_KEY'),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
Make Amazon keys non requiredimport os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
<commit_before>import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY'),
'aws_secret_access_key': config('AMAZON_SECRET_KEY'),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
<commit_msg>Make Amazon keys non required<commit_after>import os
from functools import partial
import boto3
from decouple import config
from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message
class RemoteDatasets:
def __init__(self):
self.client = None
self.credentials = {
'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
'region_name': config('AMAZON_REGION'),
}
@property
def bucket(self):
return config('AMAZON_BUCKET')
@property
def s3(self):
if not self.client:
self.client = boto3.client('s3', **self.credentials)
return self.client
@property
def all(self):
response = self.s3.list_objects(Bucket=self.bucket)
yield from (obj.get('Key') for obj in response.get('Contents', []))
def upload(self, file_path):
_, file_name = os.path.split(file_path)
with status_message('Uploading {}…'.format(file_name)):
self.s3.upload_file(file_path, self.bucket, file_name)
def delete(self, file_name):
with status_message('Deleting {}…'.format(file_name)):
self.s3.delete_object(Bucket=self.bucket, Key=file_name)
|
b202e1cc5e6c5aa65c3ed22ad1e78ec505fa36c4
|
cmsplugin_rst/forms.py
|
cmsplugin_rst/forms.py
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
Add "fields" attribute to ModelForm.
|
Add "fields" attribute to ModelForm.
|
Python
|
bsd-3-clause
|
pakal/cmsplugin-rst,ojii/cmsplugin-rst
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModelAdd "fields" attribute to ModelForm.
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
<commit_before>from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel<commit_msg>Add "fields" attribute to ModelForm.<commit_after>
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModelAdd "fields" attribute to ModelForm.from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
<commit_before>from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel<commit_msg>Add "fields" attribute to ModelForm.<commit_after>from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"]
|
ce241802ec85db638adbf4c81d9002ce5b32a1d4
|
misc/time_scraper.py
|
misc/time_scraper.py
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'TAS', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
Remove 'TAS' from games blacklist
|
Remove 'TAS' from games blacklist
|
Python
|
mit
|
bcongdon/sgdq-collector,bcongdon/sgdq-collector,bcongdon/sgdq-collector
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'TAS', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
Remove 'TAS' from games blacklist
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
<commit_before>from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'TAS', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
<commit_msg>Remove 'TAS' from games blacklist<commit_after>
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'TAS', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
Remove 'TAS' from games blacklistfrom bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
<commit_before>from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'TAS', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
<commit_msg>Remove 'TAS' from games blacklist<commit_after>from bs4 import BeautifulSoup
import requests
import json
import sys
html = requests.get("http://gamesdonequick.com/schedule").text
soup = BeautifulSoup(html, 'html.parser')
table = soup.find('tbody')
first_rows = table.findAll('tr', attrs={'class': None})
games = list()
for row in first_rows:
second_row = row.findNext('tr', attrs={'class': 'second-row'})
duration = 0
if second_row:
duration = second_row.findNext('td').text.strip()
runner_text = row.find('td', attrs={'rowspan': 2})
runner = runner_text.text.strip() if runner_text else ""
start_time_text = row.find('td', attrs={'class': "start-time"})
start_time = start_time_text.text if start_time_text else ""
game = {
'title': row.find('td', attrs={'class': None}).text,
'duration': duration,
'runner': runner,
'start_time': start_time,
}
games.append(game)
blacklist = ['Pre-Show', 'Setup Block', 'Finale']
games = [x for x in games if not any(x['title'].startswith(b) for b in blacklist)]
if len(sys.argv) == 1 or sys.argv[1] == 'verbose':
print json.dumps(games)
else:
with open('../data_file.json', 'w+') as f:
f.write(json.dumps(games))
|
ecd18d561a795b5de8d77c4de71606924791cf01
|
K2fov/tests/test_plot.py
|
K2fov/tests/test_plot.py
|
"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
"""Tests K2fov.plot"""
# from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
Simplify plot test for now
|
Simplify plot test for now
|
Python
|
mit
|
mrtommyb/K2fov,KeplerGO/K2fov
|
"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
Simplify plot test for now
|
"""Tests K2fov.plot"""
# from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
<commit_before>"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
<commit_msg>Simplify plot test for now<commit_after>
|
"""Tests K2fov.plot"""
# from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
Simplify plot test for now"""Tests K2fov.plot"""
# from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
<commit_before>"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
<commit_msg>Simplify plot test for now<commit_after>"""Tests K2fov.plot"""
# from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
31ec9a0ae45c42c79f0e2edba3f11fc0578f33c4
|
orchard/errors/e500.py
|
orchard/errors/e500.py
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page.
:return: A page explaining the error.
"""
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
Send mail to admins on all internal server errors.
|
Send mail to admins on all internal server errors.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page.
:return: A page explaining the error.
"""
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
Send mail to admins on all internal server errors.
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
<commit_before># -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page.
:return: A page explaining the error.
"""
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
<commit_msg>Send mail to admins on all internal server errors.<commit_after>
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page.
:return: A page explaining the error.
"""
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
Send mail to admins on all internal server errors.# -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page for internal errors and send a mail to all administrators
information them of this error.
:return: A page explaining the error.
"""
message = ('Time: {time}\n' +
'Request: {method} {path}\n' +
'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
'Raw Agent: {agent}\n\n'
).format(time = datetime.datetime.now(),
method = flask.request.method,
path = flask.request.path,
agent_platform = flask.request.user_agent.platform,
agent_browser = flask.request.user_agent.browser,
agent_browser_version = flask.request.user_agent.version,
agent = flask.request.user_agent.string)
flask.current_app.logger.exception(message)
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
<commit_before># -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
"""
View for ``500 Internal Server Error`` errors.
"""
trailing_slash = False
@blueprint.app_errorhandler(500)
@blueprint.app_errorhandler(Exception)
def index(self) -> str:
"""
Display the error page.
:return: A page explaining the error.
"""
return flask.render_template('errors/500.html')
Error500View.register(blueprint)
<commit_msg>Send mail to admins on all internal server errors.<commit_after># -*- coding: utf-8 -*-
"""
This module sets up the view for handling ``500 Internal Server Error`` errors.
"""
import datetime
import flask
import flask_classful
from orchard.errors import blueprint
class Error500View(flask_classful.FlaskView):
    """
    View for ``500 Internal Server Error`` errors.
    """

    # Register routes without a trailing slash.
    trailing_slash = False

    @blueprint.app_errorhandler(500)
    @blueprint.app_errorhandler(Exception)
    def index(self) -> str:
        """
        Display the error page for internal errors and log the exception
        together with a summary of the failing request.

        NOTE(review): the commit intent was to notify administrators by
        mail; this method itself only logs — presumably a mail-emitting
        logging handler is configured elsewhere. Confirm.

        :return: A page explaining the error.
        """
        # Request summary; the traceback itself is appended automatically
        # by ``logger.exception`` below.
        message = ('Time: {time}\n' +
                   'Request: {method} {path}\n' +
                   'Agent: {agent_platform} | {agent_browser} {agent_browser_version}\n' +
                   'Raw Agent: {agent}\n\n'
                   ).format(time = datetime.datetime.now(),
                            method = flask.request.method,
                            path = flask.request.path,
                            agent_platform = flask.request.user_agent.platform,
                            agent_browser = flask.request.user_agent.browser,
                            agent_browser_version = flask.request.user_agent.version,
                            agent = flask.request.user_agent.string)
        flask.current_app.logger.exception(message)
        return flask.render_template('errors/500.html')
Error500View.register(blueprint)
|
aba13a6b443922dd4f4e97b252c073ab23a223c4
|
awesomeshop/shop/models/category.py
|
awesomeshop/shop/models/category.py
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db, get_locale
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
@classmethod
def ordered_all(self):
catlist = []
def add_cats(categories, level=0):
for cat in categories:
cat.level = level
catlist.append(cat)
add_cats(cat.children, level+1)
add_cats(self.objects(parent=None))
return catlist
@property
def children(self):
return Category.objects(parent=self)
@property
def products(self):
from .product import Product
return Product.objects(category=self).count()
@property
def full_name(self):
name = self.name.get(get_locale(), '')
if self.parent:
return u'{} » {}'.format(self.parent.full_name, name)
else:
return name
|
Make categories easier to request
|
Make categories easier to request
|
Python
|
agpl-3.0
|
tiramiseb/awesomeshop,tiramiseb/awesomeshop,tiramiseb/awesomeshop,tiramiseb/awesomeshop
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
Make categories easier to request
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db, get_locale
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
@classmethod
def ordered_all(self):
catlist = []
def add_cats(categories, level=0):
for cat in categories:
cat.level = level
catlist.append(cat)
add_cats(cat.children, level+1)
add_cats(self.objects(parent=None))
return catlist
@property
def children(self):
return Category.objects(parent=self)
@property
def products(self):
from .product import Product
return Product.objects(category=self).count()
@property
def full_name(self):
name = self.name.get(get_locale(), '')
if self.parent:
return u'{} » {}'.format(self.parent.full_name, name)
else:
return name
|
<commit_before># -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
<commit_msg>Make categories easier to request<commit_after>
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db, get_locale
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
@classmethod
def ordered_all(self):
catlist = []
def add_cats(categories, level=0):
for cat in categories:
cat.level = level
catlist.append(cat)
add_cats(cat.children, level+1)
add_cats(self.objects(parent=None))
return catlist
@property
def children(self):
return Category.objects(parent=self)
@property
def products(self):
from .product import Product
return Product.objects(category=self).count()
@property
def full_name(self):
name = self.name.get(get_locale(), '')
if self.parent:
return u'{} » {}'.format(self.parent.full_name, name)
else:
return name
|
# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
Make categories easier to request# -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db, get_locale
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
@classmethod
def ordered_all(self):
catlist = []
def add_cats(categories, level=0):
for cat in categories:
cat.level = level
catlist.append(cat)
add_cats(cat.children, level+1)
add_cats(self.objects(parent=None))
return catlist
@property
def children(self):
return Category.objects(parent=self)
@property
def products(self):
from .product import Product
return Product.objects(category=self).count()
@property
def full_name(self):
name = self.name.get(get_locale(), '')
if self.parent:
return u'{} » {}'.format(self.parent.full_name, name)
else:
return name
|
<commit_before># -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db
from ...mongo import TranslationsField
class Category(db.Document):
rank = db.IntField()
slug = db.StringField()
parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
name = TranslationsField()
description = TranslationsField()
meta = {
'ordering': ['rank']
}
<commit_msg>Make categories easier to request<commit_after># -*- coding: utf8 -*-
# Copyright 2015 Sébastien Maccagnoni-Munch
#
# This file is part of AwesomeShop.
#
# AwesomeShop is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# AwesomeShop is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with AwesomeShop. If not, see <http://www.gnu.org/licenses/>.
from ... import db, get_locale
from ...mongo import TranslationsField
class Category(db.Document):
    """A product category, optionally nested under a parent category."""

    # Position used to order sibling categories (see ``meta``).
    rank = db.IntField()
    slug = db.StringField()
    # DENY: a category cannot be deleted while children still reference it.
    parent = db.ReferenceField('self', reverse_delete_rule=db.DENY)
    name = TranslationsField()
    description = TranslationsField()

    meta = {
        'ordering': ['rank']
    }

    @classmethod
    def ordered_all(cls):
        """Return every category depth-first, each with a transient
        ``level`` attribute set to its nesting depth (0 for roots).

        Fix: the first parameter of a classmethod is conventionally named
        ``cls``, not ``self`` (it receives the class, not an instance).
        """
        catlist = []

        def add_cats(categories, level=0):
            # Recursively append each category, then its children.
            for cat in categories:
                cat.level = level
                catlist.append(cat)
                add_cats(cat.children, level + 1)

        add_cats(cls.objects(parent=None))
        return catlist

    @property
    def children(self):
        """Queryset of the direct sub-categories of this category."""
        return Category.objects(parent=self)

    @property
    def products(self):
        """Number of products in this category (a count, not a queryset)."""
        # Imported here to avoid a circular import with .product.
        from .product import Product
        return Product.objects(category=self).count()

    @property
    def full_name(self):
        """Name in the current locale prefixed by ancestor names,
        e.g. ``Parent » Child``."""
        name = self.name.get(get_locale(), '')
        if self.parent:
            return u'{} » {}'.format(self.parent.full_name, name)
        else:
            return name
|
b456e982e1cbc902fa1aefaf221b058edb6c778f
|
backend/uclapi/oauth/app_helpers.py
|
backend/uclapi/oauth/app_helpers.py
|
from binascii import hexlify
import os
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
|
from binascii import hexlify
import os
import textwrap
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
'-'.join(textwrap.wrap(key, 15))
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
|
Clean up verification code logic, as per @jermenkoo's feedback
|
Clean up verification code logic, as per @jermenkoo's feedback
|
Python
|
mit
|
uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi
|
from binascii import hexlify
import os
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
Clean up verification code logic, as per @jermenkoo's feedback
|
from binascii import hexlify
import os
import textwrap
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
'-'.join(textwrap.wrap(key, 15))
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
|
<commit_before>from binascii import hexlify
import os
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
<commit_msg>Clean up verification code logic, as per @jermenkoo's feedback<commit_after>
|
from binascii import hexlify
import os
import textwrap
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
'-'.join(textwrap.wrap(key, 15))
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
|
from binascii import hexlify
import os
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
Clean up verification code logic, as per @jermenkoo's feedbackfrom binascii import hexlify
import os
import textwrap
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
'-'.join(textwrap.wrap(key, 15))
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
|
<commit_before>from binascii import hexlify
import os
def generate_user_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi-user" + dashes_key
return final
def generate_random_verification_code():
key = hexlify(os.urandom(40)).decode()
final = "verify" + key
return final
<commit_msg>Clean up verification code logic, as per @jermenkoo's feedback<commit_after>from binascii import hexlify
import os
import textwrap
def generate_user_token():
    """Generate a random user token of the form
    ``uclapi-user-xxxxxxxxxxxxxxx-...`` — the prefix followed by four
    dash-separated groups of 15 hex characters.
    """
    # 30 random bytes -> 60 hex characters.
    key = hexlify(os.urandom(30)).decode()
    # Bug fix: the joined string was previously discarded, leaving
    # ``dashes_key`` empty. Split the hex key into 15-character groups,
    # each preceded by a dash (the pre-refactor loop also emitted a dash
    # at position 0, i.e. right after the prefix).
    dashes_key = "-" + "-".join(textwrap.wrap(key, 15))
    final = "uclapi-user" + dashes_key
    return final
def generate_random_verification_code():
    """Return a verification code: ``verify`` followed by 80 random
    hex characters (40 random bytes, hex-encoded)."""
    random_hex = hexlify(os.urandom(40)).decode()
    return "verify" + random_hex
|
dfdb824eb1327a270e1c167e2ed5e161026858ea
|
antxetamedia/multimedia/handlers.py
|
antxetamedia/multimedia/handlers.py
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
    """Upload *fd* to an S3(-compatible) bucket and return the object URL.

    Gets or creates the bucket (trailing dashes stripped from its name),
    stores *fd* under *key*, and returns the key's URL with any query
    string removed.
    """
    conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
    # Bucket names must not end with '-'.
    while bucket.endswith('-'):
        bucket = bucket[:-1]
    try:
        bucket = conn.get_bucket(bucket)
    except S3ResponseError:
        try:
            bucket = conn.create_bucket(bucket, headers=metadata)
        except S3CreateError as e:
            # 409: the bucket already exists (e.g. created concurrently);
            # wrap the existing bucket without another round trip.
            if e.status == 409:
                bucket = Bucket(conn, bucket)
            # NOTE(review): on any other status, ``bucket`` is still a
            # string here and ``bucket.new_key`` below will raise
            # AttributeError — confirm this fallthrough is intended.
    key = bucket.new_key(key)
    try:
        key.set_contents_from_file(fd)
    except S3ResponseError:
        # Retry once on a transient error.
        # NOTE(review): *fd* is not rewound before the retry — if the
        # first attempt consumed the stream, the retry may upload
        # empty/partial data; consider fd.seek(0) first.
        key.set_contents_from_file(fd)
    # Drop any query string (signature parameters) from the generated URL.
    return key.generate_url(0).split('?')[0]
|
Break lowdly on unicode errors
|
Break lowdly on unicode errors
|
Python
|
agpl-3.0
|
GISAElkartea/antxetamedia,GISAElkartea/antxetamedia,GISAElkartea/antxetamedia
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Break lowdly on unicode errors
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Break lowdly on unicode errors<commit_after>
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Break lowdly on unicode errorsfrom boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except (S3ResponseError, UnicodeDecodeError):
bucket = conn.create_bucket(bucket)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Break lowdly on unicode errors<commit_after>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3ResponseError, S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
while bucket.endswith('-'):
bucket = bucket[:-1]
try:
bucket = conn.get_bucket(bucket)
except S3ResponseError:
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
key = bucket.new_key(key)
try:
key.set_contents_from_file(fd)
except S3ResponseError:
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
6ae9fa16eece7972b9b5f8e90f3b41a7d00fcabe
|
solutions/python/array-pair-sum.py
|
solutions/python/array-pair-sum.py
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
    """Return every pair ``[arr[i], arr[j]]`` with ``i < j`` whose sum equals *k*.

    Pairs are listed in index order, and duplicate values produce duplicate
    pairs, exactly as a nested index scan would.
    """
    pairs = []
    for i, first in enumerate(arr):
        for second in arr[i + 1:]:
            if first + second == k:
                pairs.append([first, second])
    return pairs
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
|
Modify styling to a proper one
|
Modify styling to a proper one
|
Python
|
mit
|
patrickford/code-problems,caoglish/code-problems,Widea/code-problems,diversedition/code-problems,dwatson3/code-problems,diversedition/code-problems,nacho-gil/code-problems,sisirkoppaka/code-problems,ranveer-git/code-problems,AndrewKishino/code-problems,sisirkoppaka/code-problems,tahoeRobbo/code-problems,diversedition/code-problems,sisirkoppaka/code-problems,modulexcite/code-problems,SterlingVix/code-problems,nickell-andrew/code-problems,angelkar/code-problems,nacho-gil/code-problems,dwatson3/code-problems,netuoso/code-problems,ockang/code-problems,saurabhjn76/code-problems,nacho-gil/code-problems,marcoviappiani/code-problems,ockang/code-problems,cjjavellana/code-problems,nickell-andrew/code-problems,rkho/code-problems,faruzzy/code-problems,ankur-anand/code-problems,tahoeRobbo/code-problems,cjjavellana/code-problems,hlan2/code-problems,angelkar/code-problems,hlan2/code-problems,tahoeRobbo/code-problems,hlan2/code-problems,faruzzy/code-problems,jmera/code-problems,lgulliver/code-problems,patrickford/code-problems,modulexcite/code-problems,SterlingVix/code-problems,AndrewKishino/code-problems,BastinRobin/code-problems,lgulliver/code-problems,hlan2/code-problems,jmera/code-problems,jmera/code-problems,ankur-anand/code-problems,akaragkiozidis/code-problems,ockang/code-problems,SterlingVix/code-problems,diversedition/code-problems,caoglish/code-problems,ockang/code-problems,BastinRobin/code-problems,angelkar/code-problems,nacho-gil/code-problems,cjjavellana/code-problems,Widea/code-problems,rkho/code-problems,nacho-gil/code-problems,ankur-anand/code-problems,AndrewKishino/code-problems,ockang/code-problems,sisirkoppaka/code-problems,saurabhjn76/code-problems,sethdame/code-problems,caoglish/code-problems,dwatson3/code-problems,tahoeRobbo/code-problems,saurabhjn76/code-problems,aloisdg/code-problems,jmera/code-problems,aloisdg/code-problems,SterlingVix/code-problems,ranveer-git/code-problems,sethdame/code-problems,rkho/code-problems,caoglish/code-problems,marcoviappiani/cod
e-problems,sisirkoppaka/code-problems,AndrewKishino/code-problems,nickell-andrew/code-problems,diversedition/code-problems,tahoeRobbo/code-problems,lgulliver/code-problems,ockang/code-problems,Widea/code-problems,sethdame/code-problems,nickell-andrew/code-problems,modulexcite/code-problems,ankur-anand/code-problems,BastinRobin/code-problems,ankur-anand/code-problems,marcoviappiani/code-problems,tahoeRobbo/code-problems,hlan2/code-problems,diversedition/code-problems,ranveer-git/code-problems,caoglish/code-problems,Widea/code-problems,SterlingVix/code-problems,angelkar/code-problems,akaragkiozidis/code-problems,nacho-gil/code-problems,sisirkoppaka/code-problems,Widea/code-problems,ranveer-git/code-problems,BastinRobin/code-problems,cjjavellana/code-problems,sethdame/code-problems,angelkar/code-problems,aloisdg/code-problems,jefimenko/code-problems,Widea/code-problems,faruzzy/code-problems,rkho/code-problems,SterlingVix/code-problems,nickell-andrew/code-problems,jefimenko/code-problems,netuoso/code-problems,caoglish/code-problems,dwatson3/code-problems,rkho/code-problems,faruzzy/code-problems,rkho/code-problems,tahoeRobbo/code-problems,nacho-gil/code-problems,AndrewKishino/code-problems,Widea/code-problems,ranveer-git/code-problems,sisirkoppaka/code-problems,ockang/code-problems,faruzzy/code-problems,cjjavellana/code-problems,SterlingVix/code-problems,netuoso/code-problems,modulexcite/code-problems,dwatson3/code-problems,nickell-andrew/code-problems,caoglish/code-problems,tahoeRobbo/code-problems,saurabhjn76/code-problems,jefimenko/code-problems,lgulliver/code-problems,sethdame/code-problems,jmera/code-problems,sisirkoppaka/code-problems,AndrewKishino/code-problems,patrickford/code-problems,marcoviappiani/code-problems,modulexcite/code-problems,netuoso/code-problems,akaragkiozidis/code-problems,Widea/code-problems,caoglish/code-problems,hlan2/code-problems,lgulliver/code-problems,saurabhjn76/code-problems,ranveer-git/code-problems,AndrewKishino/code-problems,saurabhjn
76/code-problems,tahoeRobbo/code-problems,ankur-anand/code-problems,hlan2/code-problems,marcoviappiani/code-problems,blakeembrey/code-problems,ockang/code-problems,Widea/code-problems,diversedition/code-problems,akaragkiozidis/code-problems,ankur-anand/code-problems,BastinRobin/code-problems,ranveer-git/code-problems,nickell-andrew/code-problems,akaragkiozidis/code-problems,AndrewKishino/code-problems,blakeembrey/code-problems,patrickford/code-problems,angelkar/code-problems,lgulliver/code-problems,jmera/code-problems,modulexcite/code-problems,aloisdg/code-problems,caoglish/code-problems,akaragkiozidis/code-problems,BastinRobin/code-problems,jmera/code-problems,cjjavellana/code-problems,ankur-anand/code-problems,angelkar/code-problems,hlan2/code-problems,nacho-gil/code-problems,patrickford/code-problems,patrickford/code-problems,blakeembrey/code-problems,akaragkiozidis/code-problems,sisirkoppaka/code-problems,jmera/code-problems,marcoviappiani/code-problems,patrickford/code-problems,nickell-andrew/code-problems,blakeembrey/code-problems,netuoso/code-problems,saurabhjn76/code-problems,saurabhjn76/code-problems,ankur-anand/code-problems,saurabhjn76/code-problems,dwatson3/code-problems,rkho/code-problems,aloisdg/code-problems,nacho-gil/code-problems,sethdame/code-problems,nacho-gil/code-problems,angelkar/code-problems,lgulliver/code-problems,cjjavellana/code-problems,caoglish/code-problems,netuoso/code-problems,aloisdg/code-problems,ockang/code-problems,faruzzy/code-problems,jefimenko/code-problems,aloisdg/code-problems,SterlingVix/code-problems,ankur-anand/code-problems,dwatson3/code-problems,sethdame/code-problems,SterlingVix/code-problems,tahoeRobbo/code-problems,saurabhjn76/code-problems,faruzzy/code-problems,Widea/code-problems,jefimenko/code-problems,ranveer-git/code-problems,blakeembrey/code-problems,hlan2/code-problems,jefimenko/code-problems,faruzzy/code-problems,ranveer-git/code-problems,blakeembrey/code-problems,modulexcite/code-problems,nickell-andrew/code-
problems,modulexcite/code-problems,AndrewKishino/code-problems,patrickford/code-problems,blakeembrey/code-problems,modulexcite/code-problems,BastinRobin/code-problems,jefimenko/code-problems,netuoso/code-problems,dwatson3/code-problems,jmera/code-problems,cjjavellana/code-problems,marcoviappiani/code-problems,rkho/code-problems,angelkar/code-problems,dwatson3/code-problems,diversedition/code-problems,nickell-andrew/code-problems,ockang/code-problems,aloisdg/code-problems,aloisdg/code-problems,marcoviappiani/code-problems,BastinRobin/code-problems,SterlingVix/code-problems,sethdame/code-problems,lgulliver/code-problems,blakeembrey/code-problems,blakeembrey/code-problems,marcoviappiani/code-problems,diversedition/code-problems,hlan2/code-problems,cjjavellana/code-problems,sethdame/code-problems,patrickford/code-problems,angelkar/code-problems,cjjavellana/code-problems,jmera/code-problems,netuoso/code-problems,netuoso/code-problems,faruzzy/code-problems,BastinRobin/code-problems,akaragkiozidis/code-problems,ranveer-git/code-problems,dwatson3/code-problems,akaragkiozidis/code-problems,sethdame/code-problems,patrickford/code-problems,rkho/code-problems,sisirkoppaka/code-problems,lgulliver/code-problems,lgulliver/code-problems,jefimenko/code-problems,modulexcite/code-problems,blakeembrey/code-problems,jefimenko/code-problems,akaragkiozidis/code-problems,rkho/code-problems,AndrewKishino/code-problems,faruzzy/code-problems,jefimenko/code-problems,aloisdg/code-problems,marcoviappiani/code-problems
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
Modify styling to a proper one
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
|
<commit_before>"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
<commit_msg>Modify styling to a proper one<commit_after>
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
|
"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
Modify styling to a proper one"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
|
<commit_before>"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
<commit_msg>Modify styling to a proper one<commit_after>"""solution to the array pair sum problem"""
def pair_sum_arrays(k, arr):
"""returns the array of pairs using an iterative method."""
result = []
for i in range(len(arr)):
for j in range(i + 1, len(arr)):
if arr[i] + arr[j] == k:
result.append([arr[i], arr[j]])
return result
|
918df7244162581fb57f301ccf6a4bf4b96ce541
|
npc/formatters/__init__.py
|
npc/formatters/__init__.py
|
from . import markdown, json
|
"""
Character listing formatters
These modules encapsulate the logic needed to create a character listing in
various formats. Each module has a single `dump` entry point which accepts, at
minimum, the characters to list and where to put them. Other args are available
in each linter.
"""
from . import markdown, json
|
Add docstring to formatters package
|
Add docstring to formatters package
|
Python
|
mit
|
aurule/npc,aurule/npc
|
from . import markdown, json
Add docstring to formatters package
|
"""
Character listing formatters
These modules encapsulate the logic needed to create a character listing in
various formats. Each module has a single `dump` entry point which accepts, at
minimum, the characters to list and where to put them. Other args are available
in each linter.
"""
from . import markdown, json
|
<commit_before>from . import markdown, json
<commit_msg>Add docstring to formatters package<commit_after>
|
"""
Character listing formatters
These modules encapsulate the logic needed to create a character listing in
various formats. Each module has a single `dump` entry point which accepts, at
minimum, the characters to list and where to put them. Other args are available
in each linter.
"""
from . import markdown, json
|
from . import markdown, json
Add docstring to formatters package"""
Character listing formatters
These modules encapsulate the logic needed to create a character listing in
various formats. Each module has a single `dump` entry point which accepts, at
minimum, the characters to list and where to put them. Other args are available
in each linter.
"""
from . import markdown, json
|
<commit_before>from . import markdown, json
<commit_msg>Add docstring to formatters package<commit_after>"""
Character listing formatters
These modules encapsulate the logic needed to create a character listing in
various formats. Each module has a single `dump` entry point which accepts, at
minimum, the characters to list and where to put them. Other args are available
in each linter.
"""
from . import markdown, json
|
8a9e58d2170e3f06228cbc0257d41f0c969da957
|
tangled/website/resources.py
|
tangled/website/resources.py
|
from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):

    """Docs index entry: renders links to every mounted ``docs`` static dir."""

    @config('text/html', template_name='tangled.website:templates/docs.mako')
    def GET(self):
        # One link per static directory registered under the 'docs' prefix;
        # prefix is a tuple like ('docs', <name>).
        directories = self.app.get_all('static_directory', as_dict=True)
        links = [
            {'href': '/'.join(prefix), 'text': prefix[1]}
            for prefix in directories
            if prefix[0] == 'docs'
        ]
        self.urlvars['id'] = 'docs'
        data = super().GET()
        data['links'] = sorted(links, key=lambda link: link['text'])
        return data
|
Replace @represent w/ @config throughout
|
Replace @represent w/ @config throughout
New name, same functionality.
|
Python
|
mit
|
TangledWeb/tangled.website
|
from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
Replace @represent w/ @config throughout
New name, same functionality.
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
<commit_before>from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
<commit_msg>Replace @represent w/ @config throughout
New name, same functionality.<commit_after>
|
from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
Replace @represent w/ @config throughout
New name, same functionality.from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
<commit_before>from tangled.web import Resource, represent
from tangled.site.resources.entry import Entry
class Docs(Entry):
@represent('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
<commit_msg>Replace @represent w/ @config throughout
New name, same functionality.<commit_after>from tangled.web import Resource, config
from tangled.site.resources.entry import Entry
class Docs(Entry):
@config('text/html', template_name='tangled.website:templates/docs.mako')
def GET(self):
static_dirs = self.app.get_all('static_directory', as_dict=True)
links = []
for prefix, dir_app in static_dirs.items():
if prefix[0] == 'docs':
links.append({
'href': '/'.join(prefix),
'text': prefix[1],
})
self.urlvars['id'] = 'docs'
data = super().GET()
data['links'] = sorted(links, key=lambda i: i['text'])
return data
|
70c9b92b72edb4612bc05d166c6e1c8539c8c076
|
opps/article/models.py
|
opps/article/models.py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.core.models.image import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.image.models import Image
from opps.core.models import Source
class Post(Article):
    """An article that can carry an ordered gallery of images."""

    # Ordering of the gallery lives on the PostImage through model.
    images = models.ManyToManyField(Image, null=True, blank=True,
            related_name='post_images', through='PostImage')
class PostImage(models.Model):
    """Through model linking a Post to one Image, with an explicit order."""

    post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
                             blank=True, related_name='postimage_post',
                             on_delete=models.SET_NULL)
    image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
                              blank=True, related_name='postimage_image',
                              on_delete=models.SET_NULL)
    # Position of the image within the post's gallery; defaults to 1.
    order = models.PositiveIntegerField(_(u'Order'), default=1)

    def __unicode__(self):
        # NOTE(review): image is nullable (SET_NULL); this raises
        # AttributeError when image is None -- confirm that is acceptable.
        return self.image.title
class PostSource(models.Model):
    """Through model linking a Post to one Source, with an explicit order."""

    post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
                             blank=True, related_name='postsource_post',
                             on_delete=models.SET_NULL)
    source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
                               blank=True, related_name='postsource_source',
                               on_delete=models.SET_NULL)
    # Position of the source within the post's source list; defaults to 1.
    order = models.PositiveIntegerField(_(u'Order'), default=1)

    def __unicode__(self):
        # NOTE(review): source is nullable (SET_NULL); this raises
        # AttributeError when source is None -- confirm that is acceptable.
        return self.source.slug
|
Change namespace image app, use opps image
|
Change namespace image app, use opps image
|
Python
|
mit
|
williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,opps/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.core.models.image import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
Change namespace image app, use opps image
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.image.models import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.core.models.image import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
<commit_msg>Change namespace image app, use opps image<commit_after>
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.image.models import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.core.models.image import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
Change namespace image app, use opps image# -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.image.models import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.core.models.image import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
<commit_msg>Change namespace image app, use opps image<commit_after># -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from opps.core.models import Article
from opps.image.models import Image
from opps.core.models import Source
class Post(Article):
images = models.ManyToManyField(Image, null=True, blank=True,
related_name='post_images', through='PostImage')
class PostImage(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postimage_post',
on_delete=models.SET_NULL)
image = models.ForeignKey(Image, verbose_name=_(u'Image'), null=True,
blank=True, related_name='postimage_image',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.image.title
class PostSource(models.Model):
post = models.ForeignKey(Post, verbose_name=_(u'Post'), null=True,
blank=True, related_name='postsource_post',
on_delete=models.SET_NULL)
source = models.ForeignKey(Source, verbose_name=_(u'Source'), null=True,
blank=True, related_name='postsource_source',
on_delete=models.SET_NULL)
order = models.PositiveIntegerField(_(u'Order'), default=1)
def __unicode__(self):
return self.source.slug
|
8764739298b3db76cfb7fba9768502304812a938
|
scores.py
|
scores.py
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in sorted_players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
Fix bug that didn't return the right leaderboard
|
Fix bug that didn't return the right leaderboard
|
Python
|
mit
|
radekj/poke-battle,skooda/poke-battle
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
Fix bug that didn't return the right leaderboard
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in sorted_players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
<commit_before>from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
<commit_msg>Fix bug that didn't return the right leaderboard<commit_after>
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in sorted_players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
Fix bug that didn't return the right leaderboardfrom nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in sorted_players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
<commit_before>from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
<commit_msg>Fix bug that didn't return the right leaderboard<commit_after>from nameko.rpc import rpc, RpcProxy
from nameko.events import event_handler
class ScoreService(object):
name = 'score_service'
player_rpc = RpcProxy('players_service')
@rpc
def leaderboard(self):
players = self.player_rpc.get_players()
sorted_players = sorted(players, key=lambda player: player.score, reverse=True)
return [(p.name, p.score) for p in sorted_players]
@event_handler('battle_service', 'battle_finished')
def update_players_score(self, data):
# NOTE: for now the winner gets 2 points and the loser 1
_, winner, loser = data
self.player_rpc.get_player(winner).add_score(2)
self.player_rpc.get_player(loser).add_score(1)
|
6dfddd2502ea3ea2682f6caa8824768987d477f3
|
facilities/tests/test_pracititioner_models.py
|
facilities/tests/test_pracititioner_models.py
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval,
InspectionReport,
CoverTemplateReport
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval, InspectionReport, CoverTemplateReport
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
Add tests for inspection reports and cover report templates.
|
Add tests for inspection reports and cover report templates.
|
Python
|
mit
|
urandu/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,urandu/mfl_api
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
Add tests for inspection reports and cover report templates.
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval,
InspectionReport,
CoverTemplateReport
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval, InspectionReport, CoverTemplateReport
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
<commit_before>from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
<commit_msg>Add tests for inspection reports and cover report templates.<commit_after>
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval,
InspectionReport,
CoverTemplateReport
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval, InspectionReport, CoverTemplateReport
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
Add tests for inspection reports and cover report templates.from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval,
InspectionReport,
CoverTemplateReport
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval, InspectionReport, CoverTemplateReport
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
<commit_before>from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
<commit_msg>Add tests for inspection reports and cover report templates.<commit_after>from django.test import TestCase
from model_mommy import mommy
from ..models import (
PracticeType,
Speciality,
Qualification,
PractitionerQualification,
PractitionerContact,
PractitionerFacility,
Practitioner,
ServiceCategory,
Option,
Service,
FacilityService,
ServiceOption,
ServiceRating,
FacilityApproval,
InspectionReport,
CoverTemplateReport
)
class TestModels(TestCase):
def test_save(self):
models = [
PracticeType, Speciality, Qualification,
PractitionerQualification, PractitionerContact,
PractitionerFacility, Practitioner, ServiceCategory, Option,
Service, FacilityService, ServiceOption, ServiceRating,
FacilityApproval, InspectionReport, CoverTemplateReport
]
for model_cls in models:
obj = mommy.make(model_cls)
self.assertNotEquals(0, len(model_cls.objects.all()))
# a naive way to test unicodes for coverage purposes only
try:
self.assertIsInstance(obj.__unicode__(), str)
except AssertionError:
self.assertIsInstance(obj.__unicode__(), unicode)
|
acbbc3b8fa9032ced750136aab656ec84c9a4f50
|
pastamaker/__init__.py
|
pastamaker/__init__.py
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import pastamaker.env
except ImportError:
pass
|
Allow to load env from a python file
|
Allow to load env from a python file
|
Python
|
apache-2.0
|
sileht/pastamaker,sileht/pastamaker,sileht/pastamaker
|
Allow to load env from a python file
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import pastamaker.env
except ImportError:
pass
|
<commit_before><commit_msg>Allow to load env from a python file<commit_after>
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import pastamaker.env
except ImportError:
pass
|
Allow to load env from a python file# -*- encoding: utf-8 -*-
#
# Copyright © 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import pastamaker.env
except ImportError:
pass
|
<commit_before><commit_msg>Allow to load env from a python file<commit_after># -*- encoding: utf-8 -*-
#
# Copyright © 2018 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
import pastamaker.env
except ImportError:
pass
|
|
404477f4414b921d127a6744f60cddad6cbdeca1
|
scrape.py
|
scrape.py
|
import discord
import asyncio
from tqdm import tqdm
from sys import argv
script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(fileName, 'w')
print(fileName, 'has been opened.')
messageCount = 0
channel = discord.Object(id='107634811132231680')
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
#messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
import discord
import asyncio
from tqdm import tqdm
#from sys import argv
import argparse
parser = argparse.ArgumentParser(description='Discord channel scraper')
requiredNamed = parser.add_argument_group('Required arguments:')
requiredNamed.add_argument('-c', '--channel', type=str, help='Channel to scrape. Requires the channel ID.', required=True)
requiredNamed.add_argument('-o', '--output', type=str, help='Output file in form *.txt. Will be stored in the same directory.', required=True)
args = parser.parse_args()
print(args.channel)
print(args.output)
#script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(args.output, 'w')
print(args.output, 'has been opened.')
messageCount = 0
channel = discord.Object(id=args.channel)
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
Change to use argparse to parse arguments
|
Change to use argparse to parse arguments
|
Python
|
mit
|
suclearnub/discordgrapher
|
import discord
import asyncio
from tqdm import tqdm
from sys import argv
script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(fileName, 'w')
print(fileName, 'has been opened.')
messageCount = 0
channel = discord.Object(id='107634811132231680')
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
#messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
Change to use argparse to parse arguments
|
import discord
import asyncio
from tqdm import tqdm
#from sys import argv
import argparse
parser = argparse.ArgumentParser(description='Discord channel scraper')
requiredNamed = parser.add_argument_group('Required arguments:')
requiredNamed.add_argument('-c', '--channel', type=str, help='Channel to scrape. Requires the channel ID.', required=True)
requiredNamed.add_argument('-o', '--output', type=str, help='Output file in form *.txt. Will be stored in the same directory.', required=True)
args = parser.parse_args()
print(args.channel)
print(args.output)
#script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(args.output, 'w')
print(args.output, 'has been opened.')
messageCount = 0
channel = discord.Object(id=args.channel)
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
<commit_before>import discord
import asyncio
from tqdm import tqdm
from sys import argv
script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(fileName, 'w')
print(fileName, 'has been opened.')
messageCount = 0
channel = discord.Object(id='107634811132231680')
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
#messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
<commit_msg>Change to use argparse to parse arguments<commit_after>
|
import discord
import asyncio
from tqdm import tqdm
#from sys import argv
import argparse
parser = argparse.ArgumentParser(description='Discord channel scraper')
requiredNamed = parser.add_argument_group('Required arguments:')
requiredNamed.add_argument('-c', '--channel', type=str, help='Channel to scrape. Requires the channel ID.', required=True)
requiredNamed.add_argument('-o', '--output', type=str, help='Output file in form *.txt. Will be stored in the same directory.', required=True)
args = parser.parse_args()
print(args.channel)
print(args.output)
#script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(args.output, 'w')
print(args.output, 'has been opened.')
messageCount = 0
channel = discord.Object(id=args.channel)
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
import discord
import asyncio
from tqdm import tqdm
from sys import argv
script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(fileName, 'w')
print(fileName, 'has been opened.')
messageCount = 0
channel = discord.Object(id='107634811132231680')
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
#messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
Change to use argparse to parse argumentsimport discord
import asyncio
from tqdm import tqdm
#from sys import argv
import argparse
parser = argparse.ArgumentParser(description='Discord channel scraper')
requiredNamed = parser.add_argument_group('Required arguments:')
requiredNamed.add_argument('-c', '--channel', type=str, help='Channel to scrape. Requires the channel ID.', required=True)
requiredNamed.add_argument('-o', '--output', type=str, help='Output file in form *.txt. Will be stored in the same directory.', required=True)
args = parser.parse_args()
print(args.channel)
print(args.output)
#script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(args.output, 'w')
print(args.output, 'has been opened.')
messageCount = 0
channel = discord.Object(id=args.channel)
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
<commit_before>import discord
import asyncio
from tqdm import tqdm
from sys import argv
script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(fileName, 'w')
print(fileName, 'has been opened.')
messageCount = 0
channel = discord.Object(id='107634811132231680')
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
#messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
<commit_msg>Change to use argparse to parse arguments<commit_after>import discord
import asyncio
from tqdm import tqdm
#from sys import argv
import argparse
parser = argparse.ArgumentParser(description='Discord channel scraper')
requiredNamed = parser.add_argument_group('Required arguments:')
requiredNamed.add_argument('-c', '--channel', type=str, help='Channel to scrape. Requires the channel ID.', required=True)
requiredNamed.add_argument('-o', '--output', type=str, help='Output file in form *.txt. Will be stored in the same directory.', required=True)
args = parser.parse_args()
print(args.channel)
print(args.output)
#script, fileName = argv
client = discord.Client()
@client.event
async def on_ready():
print('Connection successful.')
print('ID: ' + client.user.id)
print('-----')
target = open(args.output, 'w')
print(args.output, 'has been opened.')
messageCount = 0
channel = discord.Object(id=args.channel)
print('Scraping messages...')
with tqdm(leave=True,unit='messages') as scraped:
async for msg in client.logs_from(channel, 10000000000):
line = "{} - {}: {}".format(msg.timestamp,msg.author.name, msg.content)
line = line.encode('utf-8')
toWrite = "{}".format(line)
target.write(toWrite)
target.write("\n")
messageCount += 1
#print(messageCount)
#print(msg.author,msg.content)
scraped.update(1)
print('-----')
print('Scraping complete.')
#----------------------------
client.run('email', 'password')
|
c1dd4cc4f9fcfdac4734270d6130aff4fca4e4b8
|
oneflow/base/utils.py
|
oneflow/base/utils.py
|
# -*- coding: utf-8 -*-
from django.template import RequestContext
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
email_data = EmailContent.objects.filter(name=email_template_name)
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
subject=email_data.subject,
recipients=[user.email],
context=RequestContext(request, {
'email_subject': email_data.subject,
'email_body': email_data.body}),
#post_send=post_send(user)
**kwargs)
|
# -*- coding: utf-8 -*-
from django.template import RequestContext, Template
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
# Prepare for the first rendering pass (Django)
request_context = RequestContext(request)
email_data = EmailContent.objects.get(name=email_template_name)
# Pre-render templates for the mail HTML content.
# email subject is mapped to <title> and <h1>.
stemplate = Template(email_data.subject)
email_subject = stemplate.render(request_context)
btemplate = Template(email_data.body)
email_body = btemplate.render(request_context)
# Update for the second rendering pass (Markdown in Django)
request_context.update({'email_subject': email_subject,
'email_body': email_body, })
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
# We intentionaly pass the unrendered subject string,
# because it will be rendered independantly in the
# send_mail… function (cf. there for details).
subject=email_data.subject,
recipients=[user.email],
context=request_context,
#post_send=post_send(user)
**kwargs)
|
Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).
|
Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).
|
Python
|
agpl-3.0
|
1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow
|
# -*- coding: utf-8 -*-
from django.template import RequestContext
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
email_data = EmailContent.objects.filter(name=email_template_name)
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
subject=email_data.subject,
recipients=[user.email],
context=RequestContext(request, {
'email_subject': email_data.subject,
'email_body': email_data.body}),
#post_send=post_send(user)
**kwargs)
Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).
|
# -*- coding: utf-8 -*-
from django.template import RequestContext, Template
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
# Prepare for the first rendering pass (Django)
request_context = RequestContext(request)
email_data = EmailContent.objects.get(name=email_template_name)
# Pre-render templates for the mail HTML content.
# email subject is mapped to <title> and <h1>.
stemplate = Template(email_data.subject)
email_subject = stemplate.render(request_context)
btemplate = Template(email_data.body)
email_body = btemplate.render(request_context)
# Update for the second rendering pass (Markdown in Django)
request_context.update({'email_subject': email_subject,
'email_body': email_body, })
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
# We intentionaly pass the unrendered subject string,
# because it will be rendered independantly in the
# send_mail… function (cf. there for details).
subject=email_data.subject,
recipients=[user.email],
context=request_context,
#post_send=post_send(user)
**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from django.template import RequestContext
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
email_data = EmailContent.objects.filter(name=email_template_name)
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
subject=email_data.subject,
recipients=[user.email],
context=RequestContext(request, {
'email_subject': email_data.subject,
'email_body': email_data.body}),
#post_send=post_send(user)
**kwargs)
<commit_msg>Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).<commit_after>
|
# -*- coding: utf-8 -*-
from django.template import RequestContext, Template
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
# Prepare for the first rendering pass (Django)
request_context = RequestContext(request)
email_data = EmailContent.objects.get(name=email_template_name)
# Pre-render templates for the mail HTML content.
# email subject is mapped to <title> and <h1>.
stemplate = Template(email_data.subject)
email_subject = stemplate.render(request_context)
btemplate = Template(email_data.body)
email_body = btemplate.render(request_context)
# Update for the second rendering pass (Markdown in Django)
request_context.update({'email_subject': email_subject,
'email_body': email_body, })
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
# We intentionaly pass the unrendered subject string,
# because it will be rendered independantly in the
# send_mail… function (cf. there for details).
subject=email_data.subject,
recipients=[user.email],
context=request_context,
#post_send=post_send(user)
**kwargs)
|
# -*- coding: utf-8 -*-
from django.template import RequestContext
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
email_data = EmailContent.objects.filter(name=email_template_name)
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
subject=email_data.subject,
recipients=[user.email],
context=RequestContext(request, {
'email_subject': email_data.subject,
'email_body': email_data.body}),
#post_send=post_send(user)
**kwargs)
Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).# -*- coding: utf-8 -*-
from django.template import RequestContext, Template
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
# Prepare for the first rendering pass (Django)
request_context = RequestContext(request)
email_data = EmailContent.objects.get(name=email_template_name)
# Pre-render templates for the mail HTML content.
# email subject is mapped to <title> and <h1>.
stemplate = Template(email_data.subject)
email_subject = stemplate.render(request_context)
btemplate = Template(email_data.body)
email_body = btemplate.render(request_context)
# Update for the second rendering pass (Markdown in Django)
request_context.update({'email_subject': email_subject,
'email_body': email_body, })
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
# We intentionaly pass the unrendered subject string,
# because it will be rendered independantly in the
# send_mail… function (cf. there for details).
subject=email_data.subject,
recipients=[user.email],
context=request_context,
#post_send=post_send(user)
**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from django.template import RequestContext
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
email_data = EmailContent.objects.filter(name=email_template_name)
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
subject=email_data.subject,
recipients=[user.email],
context=RequestContext(request, {
'email_subject': email_data.subject,
'email_body': email_data.body}),
#post_send=post_send(user)
**kwargs)
<commit_msg>Enable 2-pass rendering in html mails with db contents (one django for variables, one markdown for auto HTML/txt output).<commit_after># -*- coding: utf-8 -*-
from django.template import RequestContext, Template
from sparks.django import mail
from models import EmailContent
def send_email_with_db_content(request, email_template_name, user, **kwargs):
def post_send(user, email_template_name):
# TODO: implement me for real!
# I just wrote this function the way I wanted it to act,
# but user.log_email_sent() doesn't exist yet.
def post_send_log_mail_sent():
user.log_email_sent(email_template_name)
return post_send_log_mail_sent
# Prepare for the first rendering pass (Django)
request_context = RequestContext(request)
email_data = EmailContent.objects.get(name=email_template_name)
# Pre-render templates for the mail HTML content.
# email subject is mapped to <title> and <h1>.
stemplate = Template(email_data.subject)
email_subject = stemplate.render(request_context)
btemplate = Template(email_data.body)
email_body = btemplate.render(request_context)
# Update for the second rendering pass (Markdown in Django)
request_context.update({'email_subject': email_subject,
'email_body': email_body, })
mail.send_mail_html_from_template(
'emails/email_with_db_content.html',
# We intentionaly pass the unrendered subject string,
# because it will be rendered independantly in the
# send_mail… function (cf. there for details).
subject=email_data.subject,
recipients=[user.email],
context=request_context,
#post_send=post_send(user)
**kwargs)
|
1de12c334d79a5dd79ba0b0c053fa0b1287c3ad5
|
argonauts/templatetags/argonauts.py
|
argonauts/templatetags/argonauts.py
|
from __future__ import absolute_import
# `setlocale` is not threadsafe
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
from __future__ import absolute_import
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
Remove unnecessary setlocale call in templatetags
|
Remove unnecessary setlocale call in templatetags
|
Python
|
bsd-2-clause
|
fusionbox/django-argonauts
|
from __future__ import absolute_import
# `setlocale` is not threadsafe
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
Remove unnecessary setlocale call in templatetags
|
from __future__ import absolute_import
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
<commit_before>from __future__ import absolute_import
# `setlocale` is not threadsafe
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
<commit_msg>Remove unnecessary setlocale call in templatetags<commit_after>
|
from __future__ import absolute_import
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
from __future__ import absolute_import
# `setlocale` is not threadsafe
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
Remove unnecessary setlocale call in templatetagsfrom __future__ import absolute_import
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
<commit_before>from __future__ import absolute_import
# `setlocale` is not threadsafe
import locale
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
<commit_msg>Remove unnecessary setlocale call in templatetags<commit_after>from __future__ import absolute_import
from json import dumps as json_dumps
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
from argonauts.serializers import JSONArgonautEncoder
register = template.Library()
@register.filter
def json(a):
"""
Output the json encoding of its argument.
This will escape all the HTML/XML special characters with their unicode
escapes, so it is safe to be output anywhere except for inside a tag
attribute.
If the output needs to be put in an attribute, entitize the output of this
filter.
"""
kwargs = {}
if settings.DEBUG:
kwargs['indent'] = 4
kwargs['separators'] = (',', ': ')
json_str = json_dumps(a, cls=JSONArgonautEncoder, **kwargs)
# Escape all the XML/HTML special characters.
escapes = ['<', '>', '&']
for c in escapes:
json_str = json_str.replace(c, r'\u%04x' % ord(c))
# now it's safe to use mark_safe
return mark_safe(json_str)
json.is_safe = True
|
32d9f627e9d592a693e7cc3e778463ebb6dd796d
|
busstops/management/commands/correct_operator_regions.py
|
busstops/management/commands/correct_operator_regions.py
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions)
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
maybe_move_operator(operator, regions)
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions):
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
|
Fix rough command for correcting operator regions
|
Fix rough command for correcting operator regions
|
Python
|
mpl-2.0
|
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions)
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
maybe_move_operator(operator, regions)
Fix rough command for correcting operator regions
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions):
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
|
<commit_before>from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions)
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
maybe_move_operator(operator, regions)
<commit_msg>Fix rough command for correcting operator regions<commit_after>
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions):
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
|
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions)
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
maybe_move_operator(operator, regions)
Fix rough command for correcting operator regionsfrom django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions):
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
|
<commit_before>from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions)
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
maybe_move_operator(operator, regions)
<commit_msg>Fix rough command for correcting operator regions<commit_after>from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def maybe_move_operator(operator, regions):
if bool(regions) and operator.region not in regions:
if len(regions) == 1:
print 'moving', operator, 'from', operator.region, 'to', regions[0]
operator.region = regions[0]
operator.save()
else:
print operator, operator.region, regions
def handle(self, *args, **options):
for operator in Operator.objects.all():
# move Anglian Bus to the East Anglia
regions = Region.objects.filter(service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
# move Cumbria to the North West
regions = Region.objects.filter(adminarea__locality__stoppoint__service__operator=operator).distinct()
self.maybe_move_operator(operator, regions)
|
3498ca5117a35d61a5b539067b7ac743497cf8c7
|
tests/test_helpers.py
|
tests/test_helpers.py
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'FOO': 'my foo value', 'PATH': None}
assert os.environ == old
assert not os.environ.get('FOO'), "Invalid test setup"
assert not os.environ.get('BAR'), "Invalid test setup"
with EnvironContext(**new):
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'PATH': None, 'FOO': 'my foo value'}
assert os.environ == old
assert os.environ.get('PATH'), "Invalid test setup"
assert not os.environ.get('FOO'), "Invalid test setup"
with EnvironContext(**new):
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
Make environ context helper test more accurate
|
Make environ context helper test more accurate
|
Python
|
bsd-3-clause
|
mdgart/sentrylogs
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'FOO': 'my foo value', 'PATH': None}
assert os.environ == old
assert not os.environ.get('FOO'), "Invalid test setup"
assert not os.environ.get('BAR'), "Invalid test setup"
with EnvironContext(**new):
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
Make environ context helper test more accurate
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'PATH': None, 'FOO': 'my foo value'}
assert os.environ == old
assert os.environ.get('PATH'), "Invalid test setup"
assert not os.environ.get('FOO'), "Invalid test setup"
with EnvironContext(**new):
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
<commit_before>"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'FOO': 'my foo value', 'PATH': None}
assert os.environ == old
assert not os.environ.get('FOO'), "Invalid test setup"
assert not os.environ.get('BAR'), "Invalid test setup"
with EnvironContext(**new):
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
<commit_msg>Make environ context helper test more accurate<commit_after>
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'PATH': None, 'FOO': 'my foo value'}
assert os.environ == old
assert os.environ.get('PATH'), "Invalid test setup"
assert not os.environ.get('FOO'), "Invalid test setup"
with EnvironContext(**new):
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'FOO': 'my foo value', 'PATH': None}
assert os.environ == old
assert not os.environ.get('FOO'), "Invalid test setup"
assert not os.environ.get('BAR'), "Invalid test setup"
with EnvironContext(**new):
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
Make environ context helper test more accurate"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'PATH': None, 'FOO': 'my foo value'}
assert os.environ == old
assert os.environ.get('PATH'), "Invalid test setup"
assert not os.environ.get('FOO'), "Invalid test setup"
with EnvironContext(**new):
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
<commit_before>"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'FOO': 'my foo value', 'PATH': None}
assert os.environ == old
assert not os.environ.get('FOO'), "Invalid test setup"
assert not os.environ.get('BAR'), "Invalid test setup"
with EnvironContext(**new):
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
<commit_msg>Make environ context helper test more accurate<commit_after>"""
Tests for our tests helpers 8-}
"""
import os
import sys
from helpers import ArgvContext, EnvironContext
def test_argv_context():
"""
Test if ArgvContext sets the right argvs and resets to the old correctly
"""
old = sys.argv
new = ["Alice", "Bob", "Chris", "Daisy"]
assert sys.argv == old
with ArgvContext(*new):
assert sys.argv == new, \
"sys.argv wasn't correctly changed by the contextmanager"
assert sys.argv == old, "sys.argv wasn't correctly reset"
def test_environ_context():
"""
Test if EnvironContext sets the right environ values and resets to
the old values correctly
"""
old = os.environ
new = {'PATH': None, 'FOO': 'my foo value'}
assert os.environ == old
assert os.environ.get('PATH'), "Invalid test setup"
assert not os.environ.get('FOO'), "Invalid test setup"
with EnvironContext(**new):
assert not os.environ.get('PATH'), \
"os.environ[PATH] wasn't removed by the contextmanager"
assert os.environ['FOO'] == new['FOO'], \
"os.environ[FOO] wasn't set by the contextmanager"
assert os.environ == old, "os.environ wasn't correctly reset"
|
6b2ac1d6be094eddc6a940eb1dafa32e483a6b7e
|
ereuse_devicehub/resources/device/peripheral/settings.py
|
ereuse_devicehub/resources/device/peripheral/settings.py
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB', 'SAI',
'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector', 'VideoconferenceDevice'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {
'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB',
'SAI', 'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector',
'VideoconferenceDevice', 'SoundDevice', 'Microphone', 'WirelessMicrophone',
'Scaler', 'VideoScaler', 'MemoryCardReader', 'Amplifier', 'AudioAmplifier'
},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
Add new types of peripherals
|
Add new types of peripherals
|
Python
|
agpl-3.0
|
eReuse/DeviceHub,eReuse/DeviceHub
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB', 'SAI',
'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector', 'VideoconferenceDevice'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
Add new types of peripherals
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {
'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB',
'SAI', 'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector',
'VideoconferenceDevice', 'SoundDevice', 'Microphone', 'WirelessMicrophone',
'Scaler', 'VideoScaler', 'MemoryCardReader', 'Amplifier', 'AudioAmplifier'
},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
<commit_before>import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB', 'SAI',
'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector', 'VideoconferenceDevice'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
<commit_msg>Add new types of peripherals<commit_after>
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {
'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB',
'SAI', 'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector',
'VideoconferenceDevice', 'SoundDevice', 'Microphone', 'WirelessMicrophone',
'Scaler', 'VideoScaler', 'MemoryCardReader', 'Amplifier', 'AudioAmplifier'
},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB', 'SAI',
'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector', 'VideoconferenceDevice'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
Add new types of peripheralsimport copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {
'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB',
'SAI', 'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector',
'VideoconferenceDevice', 'SoundDevice', 'Microphone', 'WirelessMicrophone',
'Scaler', 'VideoScaler', 'MemoryCardReader', 'Amplifier', 'AudioAmplifier'
},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
<commit_before>import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB', 'SAI',
'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector', 'VideoconferenceDevice'},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
<commit_msg>Add new types of peripherals<commit_after>import copy
from ereuse_devicehub.resources.device.schema import Device
from ereuse_devicehub.resources.device.settings import DeviceSubSettings
class Peripheral(Device):
type = {
'type': 'string',
'allowed': {
'Router', 'Switch', 'Printer', 'Scanner', 'MultifunctionPrinter', 'Terminal', 'HUB',
'SAI', 'Keyboard', 'Mouse', 'WirelessAccessPoint', 'LabelPrinter', 'Projector',
'VideoconferenceDevice', 'SoundDevice', 'Microphone', 'WirelessMicrophone',
'Scaler', 'VideoScaler', 'MemoryCardReader', 'Amplifier', 'AudioAmplifier'
},
'required': True
}
manufacturer = copy.copy(Device.manufacturer)
manufacturer['required'] = True
serialNumber = copy.copy(Device.serialNumber)
serialNumber['required'] = True
model = copy.copy(Device.model)
model['required'] = True
class PeripheralSettings(DeviceSubSettings):
_schema = Peripheral
|
a8112a8ee3723d5ae097998efc7c43bd27cbee95
|
engineer/processors.py
|
engineer/processors.py
|
# coding=utf-8
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
|
# coding=utf-8
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
Handle LESS preprocessor errors more gracefully.
|
Handle LESS preprocessor errors more gracefully.
|
Python
|
mit
|
tylerbutler/engineer,tylerbutler/engineer,tylerbutler/engineer
|
# coding=utf-8
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
Handle LESS preprocessor errors more gracefully.
|
# coding=utf-8
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
<commit_before># coding=utf-8
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
<commit_msg>Handle LESS preprocessor errors more gracefully.<commit_after>
|
# coding=utf-8
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
# coding=utf-8
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
Handle LESS preprocessor errors more gracefully.# coding=utf-8
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
<commit_before># coding=utf-8
import logging
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical(e.cmd)
logger.critical(e.output)
raise
logger.info("Preprocessed LESS file %s." % file)
return ""
<commit_msg>Handle LESS preprocessor errors more gracefully.<commit_after># coding=utf-8
import logging
import platform
import subprocess
from path import path
from engineer.conf import settings
__author__ = 'tyler@tylerbutler.com'
logger = logging.getLogger(__name__)
# Helper function to preprocess LESS files on demand
def preprocess_less(file):
input_file = path(settings.OUTPUT_CACHE_DIR / settings.ENGINEER.STATIC_DIR.basename() / file)
css_file = path("%s.css" % str(input_file)[:-5])
if not css_file.exists():
cmd = str.format(str(settings.LESS_PREPROCESSOR), infile=input_file, outfile=css_file).split()
try:
result = subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
logger.critical("Error pre-processing LESS file %s." % file)
logger.critical(e.output)
exit(1355)
except WindowsError as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.strerror)
exit(1355)
except Exception as e:
logger.critical("Unexpected error pre-processing LESS file %s." % file)
logger.critical(e.message)
if platform.system() != 'Windows':
logger.critical("Are you sure lessc is on your path?")
exit(1355)
logger.info("Preprocessed LESS file %s." % file)
return ""
|
70f4c55d760552829a86b30baa6d6eac3f6dc47f
|
billy/bin/commands/loaddistricts.py
|
billy/bin/commands/loaddistricts.py
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
# dist['boundary_id'] = dist['boundary_id'] % dist
dist['boundary_id'] = dist['division_id'] # Stop-gap
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
Use division_id in place of bounary_id
|
Use division_id in place of bounary_id
|
Python
|
bsd-3-clause
|
loandy/billy,openstates/billy,openstates/billy,sunlightlabs/billy,sunlightlabs/billy,openstates/billy,mileswwatkins/billy,sunlightlabs/billy,mileswwatkins/billy,loandy/billy,loandy/billy,mileswwatkins/billy
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
Use division_id in place of bounary_id
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
# dist['boundary_id'] = dist['boundary_id'] % dist
dist['boundary_id'] = dist['division_id'] # Stop-gap
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
<commit_before>import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
<commit_msg>Use division_id in place of bounary_id<commit_after>
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
# dist['boundary_id'] = dist['boundary_id'] % dist
dist['boundary_id'] = dist['division_id'] # Stop-gap
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
Use division_id in place of bounary_idimport os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
# dist['boundary_id'] = dist['boundary_id'] % dist
dist['boundary_id'] = dist['division_id'] # Stop-gap
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
<commit_before>import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
dist['boundary_id'] = dist['boundary_id'] % dist
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
<commit_msg>Use division_id in place of bounary_id<commit_after>import os
import logging
import unicodecsv
from billy.core import settings, db
from billy.bin.commands import BaseCommand
logger = logging.getLogger('billy')
class LoadDistricts(BaseCommand):
name = 'loaddistricts'
help = 'Load in the Open States districts'
def add_args(self):
self.add_argument('path', metavar='PATH', type=str,
help='path to the manual data')
def handle(self, args):
path = args.path
for file_ in os.listdir(path):
if not file_.endswith(".csv"):
continue
abbr, _ = file_.split(".csv")
self.load_districts(abbr, os.path.join(path, file_))
def load_districts(self, abbr, dist_filename):
if os.path.exists(dist_filename):
db.districts.remove({'abbr': abbr})
with open(dist_filename, 'r') as fd:
dist_csv = unicodecsv.DictReader(fd)
for dist in dist_csv:
dist['_id'] = '%(abbr)s-%(chamber)s-%(name)s' % dist
# dist['boundary_id'] = dist['boundary_id'] % dist
dist['boundary_id'] = dist['division_id'] # Stop-gap
dist['num_seats'] = int(dist['num_seats'])
db.districts.save(dist, safe=True)
else:
logging.getLogger('billy').warning("%s not found, continuing without "
"districts" % dist_filename)
|
7c438b64cf5e3a0accac201b28c1f74e031a4c34
|
fabfile.py
|
fabfile.py
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
Make env.path to /var/praekelt/<PROJECT> an absolute path.
|
Make env.path to /var/praekelt/<PROJECT> an absolute path.
|
Python
|
bsd-3-clause
|
praekelt/go-rts-zambia
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Make env.path to /var/praekelt/<PROJECT> an absolute path.
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Make env.path to /var/praekelt/<PROJECT> an absolute path.<commit_after>
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
Make env.path to /var/praekelt/<PROJECT> an absolute path.from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
<commit_before>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
<commit_msg>Make env.path to /var/praekelt/<PROJECT> an absolute path.<commit_after>from fabric.api import cd, sudo, env
import os
expected_vars = [
'PROJECT',
]
for var in expected_vars:
if var not in os.environ:
raise Exception('Please specify %s environment variable' % (
var,))
PROJECT = os.environ['PROJECT']
USER = os.environ.get('USER', 'jmbo')
env.path = os.path.join('/', 'var', 'praekelt', PROJECT)
def restart():
sudo('/etc/init.d/nginx restart')
sudo('supervisorctl reload')
def deploy():
with cd(env.path):
sudo('git pull', user=USER)
sudo('ve/bin/python manage.py syncdb --migrate --noinput',
user=USER)
sudo('ve/bin/python manage.py collectstatic --noinput',
user=USER)
def install_packages(force=False):
with cd(env.path):
sudo('ve/bin/pip install %s -r requirements.pip' % (
'--upgrade' if force else '',), user=USER)
|
8c870071f95d8f42a0614de9db88d1f72e1c1672
|
api/users/serializers.py
|
api/users/serializers.py
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
given_name = ser.CharField()
middle_name = ser.CharField(source='middle_names')
family_name = ser.CharField()
suffix = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
gravatar_url = ser.CharField()
employment_institutions = ser.ListField(source='jobs')
educational_institutions = ser.ListField(source='schools')
social_accounts = ser.DictField(source='social')
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
Add fields to user serializer
|
Add fields to user serializer
|
Python
|
apache-2.0
|
mattclark/osf.io,adlius/osf.io,KAsante95/osf.io,zamattiac/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,danielneis/osf.io,reinaH/osf.io,reinaH/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,jmcarp/osf.io,kch8qx/osf.io,ckc6cz/osf.io,barbour-em/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,SSJohns/osf.io,barbour-em/osf.io,laurenrevere/osf.io,kch8qx/osf.io,amyshi188/osf.io,saradbowman/osf.io,jolene-esposito/osf.io,doublebits/osf.io,asanfilippo7/osf.io,adlius/osf.io,lyndsysimon/osf.io,sloria/osf.io,abought/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,rdhyee/osf.io,samchrisinger/osf.io,danielneis/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,zamattiac/osf.io,RomanZWang/osf.io,felliott/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,KAsante95/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,adlius/osf.io,fabianvf/osf.io,caneruguz/osf.io,Nesiehr/osf.io,cosenal/osf.io,njantrania/osf.io,fabianvf/osf.io,cwisecarver/osf.io,abought/osf.io,emetsger/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,amyshi188/osf.io,cldershem/osf.io,Johnetordoff/osf.io,lyndsysimon/osf.io,lyndsysimon/osf.io,KAsante95/osf.io,pattisdr/osf.io,caseyrygt/osf.io,mluo613/osf.io,cslzchen/osf.io,alexschiller/osf.io,Nesiehr/osf.io,acshi/osf.io,reinaH/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,fabianvf/osf.io,caseyrygt/osf.io,HarryRybacki/osf.io,cosenal/osf.io,cwisecarver/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,samanehsan/osf.io,samchrisinger/osf.io,cldershem/osf.io,caseyrygt/osf.io,fabianvf/osf.io,wearpants/osf.io,jeffreyliu3230/osf.io,njantrania/osf.io,samchrisinger/osf.io,doublebits/osf.io,jmcarp/osf.io,alexschiller/osf.io,erinspace/osf.io,samanehsan/osf.io,cslzchen/osf.io,felliott/osf.io,caneruguz/osf.io,chennan47/osf.io,brandonPurvis/osf.io,kwierman/osf.io,leb2dg/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,emetsger/osf.io,acshi/osf.io,binoculars/osf.io,w
earpants/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,jolene-esposito/osf.io,chrisseto/osf.io,TomHeatwole/osf.io,sbt9uc/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,jeffreyliu3230/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,samchrisinger/osf.io,bdyetton/prettychart,Ghalko/osf.io,binoculars/osf.io,barbour-em/osf.io,erinspace/osf.io,jnayak1/osf.io,cwisecarver/osf.io,doublebits/osf.io,mattclark/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,bdyetton/prettychart,mattclark/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,emetsger/osf.io,alexschiller/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,cldershem/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,jinluyuan/osf.io,billyhunt/osf.io,aaxelb/osf.io,ckc6cz/osf.io,kwierman/osf.io,dplorimer/osf,Johnetordoff/osf.io,billyhunt/osf.io,petermalcolm/osf.io,ckc6cz/osf.io,alexschiller/osf.io,mluo613/osf.io,mluo613/osf.io,DanielSBrown/osf.io,zachjanicki/osf.io,HarryRybacki/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,arpitar/osf.io,wearpants/osf.io,amyshi188/osf.io,jnayak1/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,sloria/osf.io,Ghalko/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,KAsante95/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,mfraezz/osf.io,sloria/osf.io,leb2dg/osf.io,haoyuchen1992/osf.io,dplorimer/osf,zachjanicki/osf.io,cwisecarver/osf.io,billyhunt/osf.io,barbour-em/osf.io,cosenal/osf.io,erinspace/osf.io,mfraezz/osf.io,mluke93/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,acshi/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,binoculars/osf.io,jinluyuan/osf.io,RomanZWang/osf.io,cosenal/osf.io,abought/osf.io,doublebits/osf.io,monikagrabowska/osf.io,cldershem/osf.io,abought/osf.io,kch8qx/osf.io,Ghalko/osf.io,acshi/osf.io,Nesiehr/osf.io,MerlinZhang/osf.io,kwierman/osf.io,felliott/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,laurenrevere/osf.io,hmoco/osf.io,brandonPurvis/osf.io,e
metsger/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,pattisdr/osf.io,HarryRybacki/osf.io,Johnetordoff/osf.io,ticklemepierce/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,rdhyee/osf.io,petermalcolm/osf.io,crcresearch/osf.io,dplorimer/osf,jolene-esposito/osf.io,haoyuchen1992/osf.io,dplorimer/osf,samanehsan/osf.io,amyshi188/osf.io,reinaH/osf.io,MerlinZhang/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,arpitar/osf.io,doublebits/osf.io,chrisseto/osf.io,RomanZWang/osf.io,wearpants/osf.io,hmoco/osf.io,icereval/osf.io,jeffreyliu3230/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,icereval/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,icereval/osf.io,felliott/osf.io,caseyrollins/osf.io,mluke93/osf.io,cslzchen/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,sbt9uc/osf.io,rdhyee/osf.io,KAsante95/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,njantrania/osf.io,mluo613/osf.io,njantrania/osf.io,lyndsysimon/osf.io,Ghalko/osf.io,chrisseto/osf.io,SSJohns/osf.io,SSJohns/osf.io,arpitar/osf.io,jinluyuan/osf.io,ckc6cz/osf.io,GageGaskins/osf.io,jolene-esposito/osf.io,acshi/osf.io,bdyetton/prettychart,jinluyuan/osf.io,chennan47/osf.io,hmoco/osf.io,mluke93/osf.io,jmcarp/osf.io,arpitar/osf.io,adlius/osf.io,DanielSBrown/osf.io,hmoco/osf.io,kwierman/osf.io,jnayak1/osf.io,ticklemepierce/osf.io,caneruguz/osf.io,TomBaxter/osf.io,chrisseto/osf.io,mluke93/osf.io
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
Add fields to user serializer
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
given_name = ser.CharField()
middle_name = ser.CharField(source='middle_names')
family_name = ser.CharField()
suffix = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
gravatar_url = ser.CharField()
employment_institutions = ser.ListField(source='jobs')
educational_institutions = ser.ListField(source='schools')
social_accounts = ser.DictField(source='social')
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
<commit_before>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
<commit_msg>Add fields to user serializer<commit_after>
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
given_name = ser.CharField()
middle_name = ser.CharField(source='middle_names')
family_name = ser.CharField()
suffix = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
gravatar_url = ser.CharField()
employment_institutions = ser.ListField(source='jobs')
educational_institutions = ser.ListField(source='schools')
social_accounts = ser.DictField(source='social')
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
Add fields to user serializerfrom rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
given_name = ser.CharField()
middle_name = ser.CharField(source='middle_names')
family_name = ser.CharField()
suffix = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
gravatar_url = ser.CharField()
employment_institutions = ser.ListField(source='jobs')
educational_institutions = ser.ListField(source='schools')
social_accounts = ser.DictField(source='social')
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
<commit_before>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
# TODO: finish me
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
<commit_msg>Add fields to user serializer<commit_after>from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, Link
class UserSerializer(JSONAPISerializer):
id = ser.CharField(read_only=True, source='_id')
fullname = ser.CharField()
given_name = ser.CharField()
middle_name = ser.CharField(source='middle_names')
family_name = ser.CharField()
suffix = ser.CharField()
date_registered = ser.DateTimeField(read_only=True)
gravatar_url = ser.CharField()
employment_institutions = ser.ListField(source='jobs')
educational_institutions = ser.ListField(source='schools')
social_accounts = ser.DictField(source='social')
links = LinksField({
'html': 'absolute_url',
'nodes': {
'relation': Link('users:user-nodes', kwargs={'pk': '<pk>'})
}
})
class Meta:
type_ = 'users'
def update(self, instance, validated_data):
# TODO
pass
|
25061826bb3316d0fb25cfae0e5d36a0f329f803
|
bayesian_jobs/handlers/clean_postgres.py
|
bayesian_jobs/handlers/clean_postgres.py
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if not entry.task_result or 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
Fix exception when task result is null
|
Fix exception when task result is null
|
Python
|
apache-2.0
|
fabric8-analytics/fabric8-analytics-jobs,fabric8-analytics/fabric8-analytics-jobs
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
Fix exception when task result is null
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if not entry.task_result or 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
<commit_before>from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
<commit_msg>Fix exception when task result is null<commit_after>
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if not entry.task_result or 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
Fix exception when task result is nullfrom selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if not entry.task_result or 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
<commit_before>from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
<commit_msg>Fix exception when task result is null<commit_after>from selinon import StoragePool
from cucoslib.models import WorkerResult, Analysis
from .base import BaseHandler
class CleanPostgres(BaseHandler):
""" Clean JSONB columns in Postgres """
def execute(self):
s3 = StoragePool.get_connected_storage('S3Data')
start = 0
while True:
results = self.postgres.session.query(WorkerResult).\
join(Analysis).\
filter(Analysis.finished_at != None).\
filter(WorkerResult.external_request_id == None).\
order_by(WorkerResult.id).\
slice(start, start + 10).all()
if not results:
self.log.info("Cleaning postgres finished")
break
self.log.info("Updating results, slice offset is %s", start)
start += 10
for entry in results:
if entry.worker[0].isupper() or entry.worker in ('recommendation', 'stack_aggregator'):
continue
if not entry.task_result or 'VersionId' in entry.task_result:
continue
result_object_key = s3._construct_task_result_object_key(entry.ecosystem.name,
entry.package.name,
entry.version.identifier,
entry.worker)
if s3.object_exists(result_object_key):
entry.task_result = {'VersionId': s3.retrieve_latest_version_id(result_object_key)}
else:
entry.task_result = None
entry.error = True
self.postgres.session.commit()
del entry
|
47bb8e983dad168451d65c0032f5568357a8d359
|
battlesnake/plugins/imc2/triggers.py
|
battlesnake/plugins/imc2/triggers.py
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
Python
|
bsd-3-clause
|
gtaylor/btmux_battlesnake
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
Adjust IMC2 trigger regex to handle multiple colons correctly.
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
<commit_before>import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
<commit_msg>Adjust IMC2 trigger regex to handle multiple colons correctly.<commit_after>
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
Adjust IMC2 trigger regex to handle multiple colons correctly.import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
<commit_before>import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>.*): (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
<commit_msg>Adjust IMC2 trigger regex to handle multiple colons correctly.<commit_after>import re
from battlesnake.core.triggers import TriggerTable
from battlesnake.core.triggers import Trigger
from battlesnake.plugins.imc2 import imc2
from battlesnake.plugins.imc2.channel_map import MUX_TO_IMC2_CHANNEL_MAP
class ChannelMessageTrigger(Trigger):
"""
Tries to identify potential IMC2 channel activity.
"""
line_regex = re.compile(r'.*\[(?P<channel>.*)\] (?P<author>[\w`$_\-.,\']+)[:] (?P<message>.*)\r')
def run(self, protocol, line, re_match):
"""
:param basestring line: The line that matched the trigger.
:param re.MatchObject re_match: A Python MatchObject for the regex
groups specified in the Trigger's regex string.
"""
channel = re_match.group("channel")
author = re_match.group("author")
message = re_match.group("message")
imc2_channel = MUX_TO_IMC2_CHANNEL_MAP.get(channel, None)
imc2.IMC2_PROTO_INSTANCE.data_out(
text=message, packet_type="broadcast", sender=author,
channel=imc2_channel)
class IMC2TriggerTable(TriggerTable):
triggers = [
ChannelMessageTrigger,
]
|
99e1b98540c9c4105e4b0e1638a7856436cd31ad
|
beastling/clocks/prior.py
|
beastling/clocks/prior.py
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
super().__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
try:
super().__init__(clock_config, global_config)
except TypeError:
# Python 2 has no argument-less super()
super(RatePriorClock, self).__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
Fix super call for python2
|
Fix super call for python2
|
Python
|
bsd-2-clause
|
lmaurits/BEASTling
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
super().__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
Fix super call for python2
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
try:
super().__init__(clock_config, global_config)
except TypeError:
# Python 2 has no argument-less super()
super(RatePriorClock, self).__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
<commit_before>import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
super().__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
<commit_msg>Fix super call for python2<commit_after>
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
try:
super().__init__(clock_config, global_config)
except TypeError:
# Python 2 has no argument-less super()
super(RatePriorClock, self).__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
super().__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
Fix super call for python2import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
try:
super().__init__(clock_config, global_config)
except TypeError:
# Python 2 has no argument-less super()
super(RatePriorClock, self).__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
<commit_before>import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
super().__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
<commit_msg>Fix super call for python2<commit_after>import sys
import xml.etree.ElementTree as ET
from ..distributions import Distribution
from .baseclock import BaseClock
from .strict import StrictClock
class RatePriorClock (BaseClock):
# Class stub for putting priors on clock rates
def __init__(self, clock_config, global_config):
try:
super().__init__(clock_config, global_config)
except TypeError:
# Python 2 has no argument-less super()
super(RatePriorClock, self).__init__(clock_config, global_config)
self.distribution = Distribution.from_string(
clock_config.get(
"rate", "lognormal(-6.9077552789821368, 2.3025850929940459)"),
context="clock {:s}".format(self.name),
is_point=True)
self.initial_mean = self.distribution.mean()
if clock_config.get(
"estimate_rate", True) and self.distribution.dist == "point":
self.distribution = Distribution(0, "uniform", (0, sys.maxsize))
def add_prior(self, prior):
# TODO: Lift some logic from beastxml.BeastXML.add_calibration
# and surroundings to parse prior specifications.
# Uniform prior on mean clock rate
sub_prior = ET.SubElement(
prior, "prior",
{"id": "clockPrior:%s" % self.name,
"name": "distribution",
"x": "@clockRate.c:%s" % self.name})
self.distribution.generate_xml_element(
sub_prior)
class StrictClockWithPrior (RatePriorClock, StrictClock):
pass
|
f6ddb5b76265d7597568d6169ed877e04c565f4a
|
games/managers.py
|
games/managers.py
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True)
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True).order_by('uploaded_at')
|
Order screenshots by ascending upload time in the front-end
|
Order screenshots by ascending upload time in the front-end
So that it's easy to order them intentionally. :)
... Until we come up with a better ordering solution, with weights or
something.
|
Python
|
agpl-3.0
|
lutris/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True)
Order screenshots by ascending upload time in the front-end
So that it's easy to order them intentionally. :)
... Until we come up with a better ordering solution, with weights or
something.
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True).order_by('uploaded_at')
|
<commit_before>from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True)
<commit_msg>Order screenshots by ascending upload time in the front-end
So that it's easy to order them intentionally. :)
... Until we come up with a better ordering solution, with weights or
something.<commit_after>
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True).order_by('uploaded_at')
|
from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True)
Order screenshots by ascending upload time in the front-end
So that it's easy to order them intentionally. :)
... Until we come up with a better ordering solution, with weights or
something.from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True).order_by('uploaded_at')
|
<commit_before>from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True)
<commit_msg>Order screenshots by ascending upload time in the front-end
So that it's easy to order them intentionally. :)
... Until we come up with a better ordering solution, with weights or
something.<commit_after>from django.db.models import Manager
class ScreenshotManager(Manager):
def published(self):
return self.get_query_set().filter(published=True).order_by('uploaded_at')
|
8f3484020ba44d3ddfc721671601b63248c4063e
|
server.py
|
server.py
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8000, reloader=True)
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8080, reloader=True)
|
Change default port to 8080
|
Change default port to 8080
|
Python
|
mit
|
jasontbradshaw/multivid,jasontbradshaw/multivid
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8000, reloader=True)
Change default port to 8080
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8080, reloader=True)
|
<commit_before>#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8000, reloader=True)
<commit_msg>Change default port to 8080<commit_after>
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8080, reloader=True)
|
#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8000, reloader=True)
Change default port to 8080#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8080, reloader=True)
|
<commit_before>#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8000, reloader=True)
<commit_msg>Change default port to 8080<commit_after>#!/usr/bin/env python
import os
import bottle
import multivid
# where static files are kept
STATIC_FILES_ROOT = os.path.abspath("static")
@bottle.route("/")
def index():
return bottle.static_file("index.html", root=STATIC_FILES_ROOT)
@bottle.route('/static/<filename:path>')
def serve_static(filename):
return bottle.static_file(filename, root=STATIC_FILES_ROOT)
@bottle.get("/search/autocomplete")
def autocomplete():
query = bottle.request.query["query"]
results = multivid.autocomplete(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
@bottle.get("/search/find")
def find():
query = bottle.request.query["query"]
results = multivid.find(query)
return {
"query": query,
"results": [r.to_dict() for r in results]
}
bottle.debug(True)
bottle.run(host="localhost", port=8080, reloader=True)
|
f2482b7395a5d22fb0b627fff767a57db45e662b
|
server.py
|
server.py
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.py", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.txt", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
Fix adjectives filename in serer.py
|
Fix adjectives filename in serer.py
|
Python
|
mit
|
ollien/Screenshot-Uploader,ollien/Screenshot-Uploader
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.py", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
Fix adjectives filename in serer.py
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.txt", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
<commit_before>import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.py", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
<commit_msg>Fix adjectives filename in serer.py<commit_after>
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.txt", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.py", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
Fix adjectives filename in serer.pyimport cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.txt", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
<commit_before>import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.py", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
<commit_msg>Fix adjectives filename in serer.py<commit_after>import cherrypy
import cgi
from configReader import ConfigReader
import os.path
import hashlib
from time import time
configReader = ConfigReader(name = "serverConfig.txt")
keys = configReader.getKeys()
location = keys['location']
adjectivesFile = open("adjectives.txt", "r")
adjectives = [line.rstrip() for line in adjectivesFile]
adjectivesFile.close()
class Main():
@cherrypy.expose
def index(self):
raise cherrypy.HTTPError(403)
@cherrypy.expose
def upload(self, **kwargs):
timeBytes = bytes(str(time()))
timeHash = hashlib.md5(timeBytes).hexDigest()[0:6]
adjective = random.choice(adjectives)
name = adjective + timeHash + ".png"
cherrypy.request.body.process()
parts = cherrypy.request.params['file']
outFile = open(os.path.join(location, name),'w')
if parts != None:
for part in parts:
outFile.write(part.fullvalue())
outFile.close()
return name
else:
raise ValueError
application = cherrypy.tree.mount(Main(), '/')
if __name__=='__main__':
cherrypy.server.socket_host='0.0.0.0'
cherrypy.engine.start()
cherrypy.engine.block()
|
ab69aaf5fecf429c99201db4cbcdab47c1afdd46
|
server.py
|
server.py
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 32
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
if len(line) < buffsize:
complete = True
msg = "{}{}".format(msg, line)
conn.sendall("You sent: {}".format(msg))
print "You received: {}".format(msg)
conn.close()
except KeyboardInterrupt:
server_socket.close()
if __name__ == '__main__':
server()
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 16
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
msg = "{}{}".format(msg, line)
if len(line) < buffsize:
complete = True
response = parse_request(msg)
try:
conn.sendall(response)
except TypeError:
conn.sendall(response_error('400', 'BAD REQUEST'))
conn.close()
except KeyboardInterrupt:
print "\nServer successfully shut down"
server_socket.close()
def response_ok(msg):
result = "HTTP/1.1 200 OK\r\n"
con_type = "Content-Type: text/plain\r\n"
body = "Content length: {}".format(len(msg))
return "{}{}{}".format(result, con_type, body)
def response_error(error_code, error_msg):
error_type = "HTTP/1.1 {} ERROR\r\n".format(error_code)
con_type = "Content-Type: text/plain\r\n"
body = "ERROR {}, {}\r\n".format(error_code, error_msg)
return "{}{}{}".format(error_type, con_type, body)
def parse_request(request):
request_pieces = request.split()
if len(request_pieces) != 5:
return None
error_check = check_errors(request_pieces)
if error_check == 'No Errors':
return response_ok(request_pieces[1])
return error_check
def check_errors(request):
if request[0] != 'GET':
return response_error('405', '{} METHOD NOT ALLOWED'.format(request[0]))
if request[2] != 'HTTP/1.1':
return response_error('505', '{} NOT SUPPORTED'.format(request[2]))
return 'No Errors'
if __name__ == '__main__':
server()
|
Add Functionality for HTTP requests
|
Add Functionality for HTTP requests
|
Python
|
mit
|
nbeck90/network_tools
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 32
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
if len(line) < buffsize:
complete = True
msg = "{}{}".format(msg, line)
conn.sendall("You sent: {}".format(msg))
print "You received: {}".format(msg)
conn.close()
except KeyboardInterrupt:
server_socket.close()
if __name__ == '__main__':
server()
Add Functionality for HTTP requests
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 16
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
msg = "{}{}".format(msg, line)
if len(line) < buffsize:
complete = True
response = parse_request(msg)
try:
conn.sendall(response)
except TypeError:
conn.sendall(response_error('400', 'BAD REQUEST'))
conn.close()
except KeyboardInterrupt:
print "\nServer successfully shut down"
server_socket.close()
def response_ok(msg):
result = "HTTP/1.1 200 OK\r\n"
con_type = "Content-Type: text/plain\r\n"
body = "Content length: {}".format(len(msg))
return "{}{}{}".format(result, con_type, body)
def response_error(error_code, error_msg):
error_type = "HTTP/1.1 {} ERROR\r\n".format(error_code)
con_type = "Content-Type: text/plain\r\n"
body = "ERROR {}, {}\r\n".format(error_code, error_msg)
return "{}{}{}".format(error_type, con_type, body)
def parse_request(request):
request_pieces = request.split()
if len(request_pieces) != 5:
return None
error_check = check_errors(request_pieces)
if error_check == 'No Errors':
return response_ok(request_pieces[1])
return error_check
def check_errors(request):
if request[0] != 'GET':
return response_error('405', '{} METHOD NOT ALLOWED'.format(request[0]))
if request[2] != 'HTTP/1.1':
return response_error('505', '{} NOT SUPPORTED'.format(request[2]))
return 'No Errors'
if __name__ == '__main__':
server()
|
<commit_before>import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 32
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
if len(line) < buffsize:
complete = True
msg = "{}{}".format(msg, line)
conn.sendall("You sent: {}".format(msg))
print "You received: {}".format(msg)
conn.close()
except KeyboardInterrupt:
server_socket.close()
if __name__ == '__main__':
server()
<commit_msg>Add Functionality for HTTP requests<commit_after>
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 16
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
msg = "{}{}".format(msg, line)
if len(line) < buffsize:
complete = True
response = parse_request(msg)
try:
conn.sendall(response)
except TypeError:
conn.sendall(response_error('400', 'BAD REQUEST'))
conn.close()
except KeyboardInterrupt:
print "\nServer successfully shut down"
server_socket.close()
def response_ok(msg):
result = "HTTP/1.1 200 OK\r\n"
con_type = "Content-Type: text/plain\r\n"
body = "Content length: {}".format(len(msg))
return "{}{}{}".format(result, con_type, body)
def response_error(error_code, error_msg):
error_type = "HTTP/1.1 {} ERROR\r\n".format(error_code)
con_type = "Content-Type: text/plain\r\n"
body = "ERROR {}, {}\r\n".format(error_code, error_msg)
return "{}{}{}".format(error_type, con_type, body)
def parse_request(request):
request_pieces = request.split()
if len(request_pieces) != 5:
return None
error_check = check_errors(request_pieces)
if error_check == 'No Errors':
return response_ok(request_pieces[1])
return error_check
def check_errors(request):
if request[0] != 'GET':
return response_error('405', '{} METHOD NOT ALLOWED'.format(request[0]))
if request[2] != 'HTTP/1.1':
return response_error('505', '{} NOT SUPPORTED'.format(request[2]))
return 'No Errors'
if __name__ == '__main__':
server()
|
import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 32
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
if len(line) < buffsize:
complete = True
msg = "{}{}".format(msg, line)
conn.sendall("You sent: {}".format(msg))
print "You received: {}".format(msg)
conn.close()
except KeyboardInterrupt:
server_socket.close()
if __name__ == '__main__':
server()
Add Functionality for HTTP requestsimport socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 16
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
msg = "{}{}".format(msg, line)
if len(line) < buffsize:
complete = True
response = parse_request(msg)
try:
conn.sendall(response)
except TypeError:
conn.sendall(response_error('400', 'BAD REQUEST'))
conn.close()
except KeyboardInterrupt:
print "\nServer successfully shut down"
server_socket.close()
def response_ok(msg):
    """Build a 200 OK plain-text response whose body reports len(msg)."""
    # Join the pieces in order: status line, header, body (no trailing CRLF
    # after the body, matching the original).
    parts = [
        "HTTP/1.1 200 OK\r\n",
        "Content-Type: text/plain\r\n",
        "Content length: {}".format(len(msg)),
    ]
    return "".join(parts)
def response_error(error_code, error_msg):
error_type = "HTTP/1.1 {} ERROR\r\n".format(error_code)
con_type = "Content-Type: text/plain\r\n"
body = "ERROR {}, {}\r\n".format(error_code, error_msg)
return "{}{}{}".format(error_type, con_type, body)
def parse_request(request):
request_pieces = request.split()
if len(request_pieces) != 5:
return None
error_check = check_errors(request_pieces)
if error_check == 'No Errors':
return response_ok(request_pieces[1])
return error_check
def check_errors(request):
if request[0] != 'GET':
return response_error('405', '{} METHOD NOT ALLOWED'.format(request[0]))
if request[2] != 'HTTP/1.1':
return response_error('505', '{} NOT SUPPORTED'.format(request[2]))
return 'No Errors'
if __name__ == '__main__':
server()
|
<commit_before>import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 32
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
if len(line) < buffsize:
complete = True
msg = "{}{}".format(msg, line)
conn.sendall("You sent: {}".format(msg))
print "You received: {}".format(msg)
conn.close()
except KeyboardInterrupt:
server_socket.close()
if __name__ == '__main__':
server()
<commit_msg>Add Functionality for HTTP requests<commit_after>import socket
def server():
server_socket = socket.socket(
socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_IP)
server_socket.bind(('127.0.0.1', 50000))
server_socket.listen(10)
print "listening..."
try:
while True:
conn, addr = server_socket.accept()
buffsize = 16
msg = ""
complete = False
while not complete:
line = conn.recv(buffsize)
msg = "{}{}".format(msg, line)
if len(line) < buffsize:
complete = True
response = parse_request(msg)
try:
conn.sendall(response)
except TypeError:
conn.sendall(response_error('400', 'BAD REQUEST'))
conn.close()
except KeyboardInterrupt:
print "\nServer successfully shut down"
server_socket.close()
def response_ok(msg):
result = "HTTP/1.1 200 OK\r\n"
con_type = "Content-Type: text/plain\r\n"
body = "Content length: {}".format(len(msg))
return "{}{}{}".format(result, con_type, body)
def response_error(error_code, error_msg):
error_type = "HTTP/1.1 {} ERROR\r\n".format(error_code)
con_type = "Content-Type: text/plain\r\n"
body = "ERROR {}, {}\r\n".format(error_code, error_msg)
return "{}{}{}".format(error_type, con_type, body)
def parse_request(request):
request_pieces = request.split()
if len(request_pieces) != 5:
return None
error_check = check_errors(request_pieces)
if error_check == 'No Errors':
return response_ok(request_pieces[1])
return error_check
def check_errors(request):
if request[0] != 'GET':
return response_error('405', '{} METHOD NOT ALLOWED'.format(request[0]))
if request[2] != 'HTTP/1.1':
return response_error('505', '{} NOT SUPPORTED'.format(request[2]))
return 'No Errors'
if __name__ == '__main__':
server()
|
30008019f47f4077469ad12cb2a3e203fba24527
|
server.py
|
server.py
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os

# Ensure the log directory exists before basicConfig opens the log file.
if not os.path.exists(options.log_dir):
    os.makedirs(options.log_dir)

logging.basicConfig(
    format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
    filename='%s/%s' % (options.log_dir, options.log_file),
    level=logging.DEBUG
)

# Open the MongoDB connection synchronously (run_sync blocks) before the
# application starts serving, so handlers always see a ready client.
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)

app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)

if __name__ == "__main__":
    try:
        logging.info("Starting HTTP server on port %d" % options.port)
        ioLoop.start()
    except KeyboardInterrupt:
        logging.info("Shutting down server HTTP proxy on port %d" % options.port)
        ioLoop.stop()
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
logging.getLogger('INIT').info('Connecting to mongodb at: %s' % options.db_address)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
|
Add to log db connection url
|
Add to log db connection url
|
Python
|
apache-2.0
|
jiss-software/jiss-file-service,jiss-software/jiss-file-service,jiss-software/jiss-file-service
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
Add to log db connection url
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
logging.getLogger('INIT').info('Connecting to mongodb at: %s' % options.db_address)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
|
<commit_before>import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
<commit_msg>Add to log db connection url<commit_after>
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
logging.getLogger('INIT').info('Connecting to mongodb at: %s' % options.db_address)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
|
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
Add to log db connection url
import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
logging.getLogger('INIT').info('Connecting to mongodb at: %s' % options.db_address)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
|
<commit_before>import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
<commit_msg>Add to log db connection url<commit_after>import tornado.ioloop
import tornado.web
import logging
import motor
from settings import routing
from tornado.options import options
import os
if not os.path.exists(options.log_dir):
os.makedirs(options.log_dir)
logging.basicConfig(
format='%(asctime)s [%(name)s] %(levelname)s: %(message)s',
filename='%s/%s' % (options.log_dir, options.log_file),
level=logging.DEBUG
)
logging.getLogger('INIT').info('Connecting to mongodb at: %s' % options.db_address)
ioLoop = tornado.ioloop.IOLoop.current()
mongodb = ioLoop.run_sync(motor.MotorClient(options.db_address).open)
app = tornado.web.Application(routing, db=mongodb, autoreload=options.autoreload)
app.listen(options.port)
if __name__ == "__main__":
try:
logging.info("Starting HTTP server on port %d" % options.port)
ioLoop.start()
except KeyboardInterrupt:
logging.info("Shutting down server HTTP proxy on port %d" % options.port)
ioLoop.stop()
|
448f1201a36de8ef41dadbb63cbea874dd7d5878
|
wechatpy/utils.py
|
wechatpy/utils.py
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
    """Dict subclass exposing keys as attributes; missing attributes yield None."""

    def __getattr__(self, key):
        # dict.get already returns None for absent keys, matching the
        # original's explicit membership check + None fallback.
        return self.get(key)

    def __setattr__(self, key, value):
        # Attribute assignment writes straight into the mapping.
        self[key] = value
def check_signature(token, signature, timestamp, nonce):
    """Return True if `signature` matches the WeChat-style SHA1 of the params.

    WeChat signs callback requests by sorting (token, timestamp, nonce)
    lexicographically, concatenating them, and taking the hex SHA1 digest.

    :param token: shared secret configured on the WeChat platform
    :param signature: hex digest supplied by the incoming request
    :param timestamp: request timestamp string
    :param nonce: request nonce string
    :return: bool — whether the computed digest equals `signature`
    """
    tmparr = [token, timestamp, nonce]
    tmparr.sort()
    tmpstr = ''.join(tmparr)
    # Bug fix: six.binary_type(tmpstr) is bytes(str) on Python 3, which
    # raises "string argument without an encoding". Encode explicitly;
    # .encode('utf-8') works on both Python 2 (unicode_literals) and 3.
    digest = hashlib.sha1(tmpstr.encode('utf-8')).hexdigest()
    return digest == signature
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.text_type(tmpstr).encode('utf-8')
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
|
Fix test error on Python 3
|
Fix test error on Python 3
|
Python
|
mit
|
cloverstd/wechatpy,wechatpy/wechatpy,EaseCloud/wechatpy,mruse/wechatpy,cysnake4713/wechatpy,cysnake4713/wechatpy,zhaoqz/wechatpy,navcat/wechatpy,zaihui/wechatpy,Luckyseal/wechatpy,messense/wechatpy,chenjiancan/wechatpy,chenjiancan/wechatpy,Luckyseal/wechatpy,tdautc19841202/wechatpy,navcat/wechatpy,Dufy/wechatpy,jxtech/wechatpy,Luckyseal/wechatpy,tdautc19841202/wechatpy,hunter007/wechatpy,mruse/wechatpy,EaseCloud/wechatpy,tdautc19841202/wechatpy,hunter007/wechatpy,cloverstd/wechatpy,Dufy/wechatpy,zaihui/wechatpy,zhaoqz/wechatpy,cysnake4713/wechatpy
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.binary_type(tmpstr)
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
Fix test error on Python 3
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.text_type(tmpstr).encode('utf-8')
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
|
<commit_before>from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.binary_type(tmpstr)
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
<commit_msg>Fix test error on Python 3<commit_after>
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.text_type(tmpstr).encode('utf-8')
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
|
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.binary_type(tmpstr)
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
Fix test error on Python 3
from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.text_type(tmpstr).encode('utf-8')
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
|
<commit_before>from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.binary_type(tmpstr)
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
<commit_msg>Fix test error on Python 3<commit_after>from __future__ import absolute_import, unicode_literals
import hashlib
import six
class ObjectDict(dict):
def __getattr__(self, key):
if key in self:
return self[key]
return None
def __setattr__(self, key, value):
self[key] = value
def check_signature(token, signature, timestamp, nonce):
tmparr = [token, timestamp, nonce]
tmparr.sort()
tmpstr = ''.join(tmparr)
tmpstr = six.text_type(tmpstr).encode('utf-8')
digest = hashlib.sha1(tmpstr).hexdigest()
return digest == signature
|
915c59f1e8e1919555d6b3c8de5fbc34cd56e414
|
tingbot/platform_specific/__init__.py
|
tingbot/platform_specific/__init__.py
|
import platform, os
def is_running_on_tingbot():
    """Return True when the process was launched as a tingbot app."""
    # tbprocessd exports TB_RUN_ON_LCD when it launches tingbot apps; any
    # value (including empty) counts, so test presence, not truthiness.
    return os.environ.get('TB_RUN_ON_LCD') is not None
def no_op(*args, **kwargs):
    """Accept any arguments and do nothing (fallback platform hook)."""
    return None
def no_op_returning(return_value):
    """Build a fallback callable that ignores its arguments and returns `return_value`."""
    # The closure captures return_value; a lambda replaces the named inner def.
    return lambda *args, **kwargs: return_value
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
import sys, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if sys.platform == 'darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
Use sys.platform to look at the current platform
|
Use sys.platform to look at the current platform
Avoids a uname system call
|
Python
|
bsd-2-clause
|
furbrain/tingbot-python
|
import platform, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
Use sys.platform to look at the current platform
Avoids a uname system call
|
import sys, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if sys.platform == 'darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
<commit_before>import platform, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
<commit_msg>Use sys.platform to look at the current platform
Avoids a uname system call<commit_after>
|
import sys, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if sys.platform == 'darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
import platform, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
Use sys.platform to look at the current platform
Avoids a uname system call
import sys, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if sys.platform == 'darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
<commit_before>import platform, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if platform.system() == 'Darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
<commit_msg>Use sys.platform to look at the current platform
Avoids a uname system call<commit_after>import sys, os
def is_running_on_tingbot():
"""
Return True if running as a tingbot.
"""
# TB_RUN_ON_LCD is an environment variable set by tbprocessd when running tingbot apps.
return 'TB_RUN_ON_LCD' in os.environ
def no_op(*args, **kwargs):
pass
def no_op_returning(return_value):
def inner(*args, **kwargs):
return return_value
return inner
# set fallback functions (some of these will be replaced by the real versions below)
set_backlight = no_op
mouse_attached = no_op_returning(True)
keyboard_attached = no_op_returning(True)
joystick_attached = no_op_returning(False)
get_wifi_cell = no_op_returning(None)
if sys.platform == 'darwin':
from osx import fixup_env, create_main_surface, register_button_callback
elif is_running_on_tingbot():
from tingbot import (fixup_env, create_main_surface, register_button_callback,
set_backlight, mouse_attached, keyboard_attached, joystick_attached,
get_wifi_cell)
else:
from sdl_wrapper import fixup_env, create_main_surface, register_button_callback
|
76d9ce8638ad7e5e124a9f647f174c2a3adbc426
|
src/zeit/cms/generation/evolve14.py
|
src/zeit/cms/generation/evolve14.py
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
    """Unregister every named ITaskService utility from the local site manager."""
    # NOTE(review): this only removes the component registrations; the service
    # objects themselves stay in their ZODB containers — confirm that is intended.
    site_manager = zope.component.getSiteManager()
    for name, _ in site_manager.getUtilitiesFor(ITaskService):
        done = site_manager.unregisterUtility(provided=ITaskService, name=name)
        if not done:
            # unregisterUtility returns True on success; anything else is fatal.
            raise RuntimeError('unregisterUtility did not return True')
def evolve(context):
    """Generation entry point: run `update` against the database root."""
    zeit.cms.generation.do_evolve(context, update)
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, service in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
del service.__parent__[service.__name__]
def evolve(context):
"""Remove the lovely.remotetask services."""
zeit.cms.generation.do_evolve(context, update)
|
Remove services and jobs from ZODB, too.
|
ZON-3514: Remove services and jobs from ZODB, too.
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.cms,ZeitOnline/zeit.cms,ZeitOnline/zeit.cms,ZeitOnline/zeit.cms
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, _ in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
def evolve(context):
zeit.cms.generation.do_evolve(context, update)
ZON-3514: Remove services and jobs from ZODB, too.
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, service in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
del service.__parent__[service.__name__]
def evolve(context):
"""Remove the lovely.remotetask services."""
zeit.cms.generation.do_evolve(context, update)
|
<commit_before>from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, _ in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
def evolve(context):
zeit.cms.generation.do_evolve(context, update)
<commit_msg>ZON-3514: Remove services and jobs from ZODB, too.<commit_after>
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, service in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
del service.__parent__[service.__name__]
def evolve(context):
"""Remove the lovely.remotetask services."""
zeit.cms.generation.do_evolve(context, update)
|
from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, _ in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
def evolve(context):
zeit.cms.generation.do_evolve(context, update)
ZON-3514: Remove services and jobs from ZODB, too.from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, service in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
del service.__parent__[service.__name__]
def evolve(context):
"""Remove the lovely.remotetask services."""
zeit.cms.generation.do_evolve(context, update)
|
<commit_before>from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, _ in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
def evolve(context):
zeit.cms.generation.do_evolve(context, update)
<commit_msg>ZON-3514: Remove services and jobs from ZODB, too.<commit_after>from lovely.remotetask.interfaces import ITaskService
import zeit.cms.generation
import zeit.cms.generation.install
import zope.component
def update(root):
site_manager = zope.component.getSiteManager()
for name, service in site_manager.getUtilitiesFor(ITaskService):
done = site_manager.unregisterUtility(provided=ITaskService, name=name)
if not done:
raise RuntimeError('unregisterUtility did not return True')
del service.__parent__[service.__name__]
def evolve(context):
"""Remove the lovely.remotetask services."""
zeit.cms.generation.do_evolve(context, update)
|
e9cca0d736cd388d4834e81ab6bf38ded6625b3d
|
pynmea2/types/proprietary/grm.py
|
pynmea2/types/proprietary/grm.py
|
# Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe"),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe"),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe"),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
# Garmin
from decimal import Decimal
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe", Decimal),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe", Decimal),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe", Decimal),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
Add decimal types to Garmin PGRME fields.
|
Add decimal types to Garmin PGRME fields.
|
Python
|
mit
|
silentquasar/pynmea2,Knio/pynmea2
|
# Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe"),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe"),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe"),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
Add decimal types to Garmin PGRME fields.
|
# Garmin
from decimal import Decimal
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe", Decimal),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe", Decimal),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe", Decimal),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
<commit_before># Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe"),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe"),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe"),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
<commit_msg>Add decimal types to Garmin PGRME fields.<commit_after>
|
# Garmin
from decimal import Decimal
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe", Decimal),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe", Decimal),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe", Decimal),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
# Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe"),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe"),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe"),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
Add decimal types to Garmin PGRME fields.# Garmin
from decimal import Decimal
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe", Decimal),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe", Decimal),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe", Decimal),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
<commit_before># Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe"),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe"),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe"),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
<commit_msg>Add decimal types to Garmin PGRME fields.<commit_after># Garmin
from decimal import Decimal
from ... import nmea
class GRM(nmea.ProprietarySentence):
sentence_types = {}
def __new__(_cls, manufacturer, data):
name = manufacturer + data[0]
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
def __init__(self, manufacturer, data):
self.sentence_type = manufacturer + data[0]
super(GRM, self).__init__(manufacturer, data[1:])
class GRME(GRM):
""" GARMIN Estimated position error
"""
fields = (
("Estimated Horiz. Position Error", "hpe", Decimal),
("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
("Estimated Vert. Position Error", "vpe", Decimal),
("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
("Estimated Horiz. Position Error", "osepe", Decimal),
("Overall Spherical Equiv. Position Error", "osepe_unit")
)
class GRMM(GRM):
""" GARMIN Map Datum
"""
fields = (
('Currently Active Datum', 'datum'),
)
class GRMZ(GRM):
""" GARMIN Altitude Information
"""
fields = (
("Altitude", "altitude"),
("Altitude Units (Feet)", "altitude_unit"),
("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
)
|
1ce0d9898fc31f08bbf5765b3a687eaa8067a465
|
flaskext/flask_scss.py
|
flaskext/flask_scss.py
|
from .scss import Scss
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
Python
|
mit
|
bcarlin/flask-scss
|
from .scss import Scss
Raise a DeprecationWarning when using pre-Flask-0.8 import scheme
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
<commit_before>from .scss import Scss
<commit_msg>Raise a DeprecationWarning when using pre-Flask-0.8 import scheme<commit_after>
|
from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
from .scss import Scss
Raise a DeprecationWarning when using pre-Flask-0.8 import schemefrom .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
<commit_before>from .scss import Scss
<commit_msg>Raise a DeprecationWarning when using pre-Flask-0.8 import scheme<commit_after>from .scss import Scss
from warnings import warn
warn(DeprecationWarning('Deprecated import method. '
'Please use:\n '
'from flask.ext.scss import Scss'), stacklevel=2)
|
40642464aa4d21cb1710f9197bc3456467ed22a8
|
b2b_demo/views/basket.py
|
b2b_demo/views/basket.py
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.core.models import Address
from shoop.front.views.basket import DefaultBasketView
class AddressForm(forms.ModelForm):
class Meta:
model = Address
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.front.views.basket import DefaultBasketView
from shoop.core.models import MutableAddress
class AddressForm(forms.ModelForm):
class Meta:
model = MutableAddress
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
Use MutableAddress instead of Address
|
Use MutableAddress instead of Address
|
Python
|
agpl-3.0
|
shoopio/shoop-gifter-demo,shoopio/shoop-gifter-demo,shoopio/shoop-gifter-demo
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.core.models import Address
from shoop.front.views.basket import DefaultBasketView
class AddressForm(forms.ModelForm):
class Meta:
model = Address
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
Use MutableAddress instead of Address
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.front.views.basket import DefaultBasketView
from shoop.core.models import MutableAddress
class AddressForm(forms.ModelForm):
class Meta:
model = MutableAddress
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
<commit_before># This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.core.models import Address
from shoop.front.views.basket import DefaultBasketView
class AddressForm(forms.ModelForm):
class Meta:
model = Address
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
<commit_msg>Use MutableAddress instead of Address<commit_after>
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.front.views.basket import DefaultBasketView
from shoop.core.models import MutableAddress
class AddressForm(forms.ModelForm):
class Meta:
model = MutableAddress
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.core.models import Address
from shoop.front.views.basket import DefaultBasketView
class AddressForm(forms.ModelForm):
class Meta:
model = Address
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
Use MutableAddress instead of Address# This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.front.views.basket import DefaultBasketView
from shoop.core.models import MutableAddress
class AddressForm(forms.ModelForm):
class Meta:
model = MutableAddress
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
<commit_before># This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.core.models import Address
from shoop.front.views.basket import DefaultBasketView
class AddressForm(forms.ModelForm):
class Meta:
model = Address
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
<commit_msg>Use MutableAddress instead of Address<commit_after># This file is part of Shoop Gifter Demo.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django import forms
from shoop.front.views.basket import DefaultBasketView
from shoop.core.models import MutableAddress
class AddressForm(forms.ModelForm):
class Meta:
model = MutableAddress
fields = (
"name", "phone", "email", "street",
"street2", "postal_code", "city",
"region", "country"
)
def __init__(self, *args, **kwargs):
super(AddressForm, self).__init__(*args, **kwargs)
for field_name in ("email", "postal_code"):
self.fields[field_name].required = True
class B2bBasketView(DefaultBasketView):
shipping_address_form_class = AddressForm
billing_address_form_class = AddressForm
|
5fc467745ffc637e73cdf4dfb4a37b55c581434a
|
stanford/bin/send-email.py
|
stanford/bin/send-email.py
|
#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
#!/usr/bin/env python
from email.mime.text import MIMEText
import os
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
email_params_file = 'configuration-secure/jenkins/cut-release-branch/email_params.txt'
email_params_file = os.environ.get('CONFIGURATION_EMAIL_PARAMS', email_params_file)
with open(email_params_file, 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
Use the existing version of params from secrets
|
Use the existing version of params from secrets
This way we don't need to add them manually!
|
Python
|
agpl-3.0
|
Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration,Stanford-Online/configuration
|
#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
Use the existing version of params from secrets
This way we don't need to add them manually!
|
#!/usr/bin/env python
from email.mime.text import MIMEText
import os
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
email_params_file = 'configuration-secure/jenkins/cut-release-branch/email_params.txt'
email_params_file = os.environ.get('CONFIGURATION_EMAIL_PARAMS', email_params_file)
with open(email_params_file, 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
<commit_before>#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
<commit_msg>Use the existing version of params from secrets
This way we don't need to add them manually!<commit_after>
|
#!/usr/bin/env python
from email.mime.text import MIMEText
import os
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
email_params_file = 'configuration-secure/jenkins/cut-release-branch/email_params.txt'
email_params_file = os.environ.get('CONFIGURATION_EMAIL_PARAMS', email_params_file)
with open(email_params_file, 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
Use the existing version of params from secrets
This way we don't need to add them manually!#!/usr/bin/env python
from email.mime.text import MIMEText
import os
from subprocess import call
import sys


def send(recipient, sender, sender_name, subject, body):
    """Render the e-mail template and send it through openssl's SMTP client.

    The template location defaults to the secure-configuration checkout
    and can be overridden with the CONFIGURATION_EMAIL_PARAMS
    environment variable.  The rendered message is written to a scratch
    file, piped into an ``openssl s_client`` TLS session against the
    SES endpoint, and the scratch file is then deleted.
    """
    # Default template path; the environment variable overrides it.
    email_params_file = 'configuration-secure/jenkins/cut-release-branch/email_params.txt'
    email_params_file = os.environ.get('CONFIGURATION_EMAIL_PARAMS', email_params_file)
    with open(email_params_file, 'rt') as fin:
        with open('email.txt', 'wt') as fout:
            for line in fin:
                line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
                fout.write(line)
    cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
    with open('email.txt') as fout:
        call(cmd, stdin=fout)
    # os.remove is portable and avoids spawning an external `rm`
    # process just to delete a file (os is already imported above).
    os.remove('email.txt')


if __name__ == '__main__':
    recipient = sys.argv[1]
    sender = sys.argv[2]
    sender_name = sys.argv[3]
    subject = sys.argv[4]
    path_file = sys.argv[5]
    with open(path_file) as file_input:
        body = file_input.read()
    result = send(recipient, sender, sender_name, subject, body)
|
<commit_before>#!/usr/bin/env python
from email.mime.text import MIMEText
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
with open('configuration/stanford/bin/email_params.txt', 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
<commit_msg>Use the existing version of params from secrets
This way we don't need to add them manually!<commit_after>#!/usr/bin/env python
from email.mime.text import MIMEText
import os
from subprocess import call
import sys
def send(recipient, sender, sender_name, subject, body):
email_params_file = 'configuration-secure/jenkins/cut-release-branch/email_params.txt'
email_params_file = os.environ.get('CONFIGURATION_EMAIL_PARAMS', email_params_file)
with open(email_params_file, 'rt') as fin:
with open('email.txt', 'wt') as fout:
for line in fin:
line = line.replace('{RECIPIENT}', recipient).replace('{SENDER}', sender).replace('{SENDER_NAME}', sender_name).replace('{SUBJECT}', subject).replace('{BODY}', body)
fout.write(line)
cmd = ['openssl', 's_client', '-crlf', '-quiet', '-connect', 'email-smtp.us-east-1.amazonaws.com:465']
with open('email.txt') as fout:
call(cmd, stdin=fout)
call(['rm', 'email.txt'])
if __name__ == '__main__':
recipient = sys.argv[1]
sender = sys.argv[2]
sender_name = sys.argv[3]
subject = sys.argv[4]
path_file = sys.argv[5]
with open(path_file) as file_input:
body = file_input.read()
result = send(recipient, sender, sender_name, subject, body)
|
828844ddb6a19ea15c920043f41ba09eb815c597
|
django_rq/templatetags/django_rq.py
|
django_rq/templatetags/django_rq.py
|
from django import template
from django.utils import timezone

register = template.Library()


@register.filter
def to_localtime(time):
    """Convert a naive UTC datetime to the project's default timezone.

    Returns None for a falsy input (e.g. jobs in the deferred queue have
    no enqueue timestamp) instead of raising AttributeError on
    ``None.replace`` — the same guard this codebase applies in the later
    revision of this filter.
    """
    if not time:
        return None
    utc_time = time.replace(tzinfo=timezone.utc)
    to_zone = timezone.get_default_timezone()
    return utc_time.astimezone(to_zone)
|
from django import template
from django.utils import timezone

register = template.Library()


@register.filter
def to_localtime(time):
    """Convert a naive UTC datetime into the configured local timezone.

    A falsy value (e.g. None for jobs without a timestamp) is passed
    through as None rather than raising.
    """
    if time:
        aware = time.replace(tzinfo=timezone.utc)
        return aware.astimezone(timezone.get_default_timezone())
    return None
|
Fix issue displaying deferred queue
|
Fix issue displaying deferred queue
|
Python
|
mit
|
ui/django-rq,ui/django-rq,1024inc/django-rq,1024inc/django-rq
|
from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
Fix issue displaying deferred queue
|
from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
if not time:
return None
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
|
<commit_before>from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
<commit_msg>Fix issue displaying deferred queue<commit_after>
|
from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
if not time:
return None
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
|
from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
Fix issue displaying deferred queuefrom django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
if not time:
return None
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
|
<commit_before>from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
<commit_msg>Fix issue displaying deferred queue<commit_after>from django import template
from django.utils import timezone
register = template.Library()
@register.filter
def to_localtime(time):
'''
A function to convert naive datetime to
localtime base on settings
'''
if not time:
return None
utc_time = time.replace(tzinfo=timezone.utc)
to_zone = timezone.get_default_timezone()
return utc_time.astimezone(to_zone)
|
bb9fc566677e92d5ad6bf08af62b610c6cdddbff
|
pipeline/compute_rpp/compute_rpp.py
|
pipeline/compute_rpp/compute_rpp.py
|
import sys
import os

import numpy as np

from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp

# Command-line arguments: where the *.fit rides live, and where the
# resulting profile should be stored.
data_path = sys.argv[1]
storage_path = sys.argv[2]

# Gather every *.fit file found anywhere under the data directory.
filenames = [os.path.join(root, name)
             for root, dirs, files in os.walk(data_path)
             for name in files
             if name.endswith('.fit')]

max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)

# Accumulate each ride into the rider's record power profile.
for filename in filenames:
    power_ride = load_power_from_fit(filename)
    rpp_rider.fit(power_ride)

# Make sure the output directory exists, then persist the profile.
if not os.path.exists(storage_path):
    os.makedirs(storage_path)
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os

import numpy as np

from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp

# First argument: directory containing the *.fit rides.
data_path = sys.argv[1]
# Second argument: directory in which to store the computed profile.
storage_path = sys.argv[2]

# Build the list of every *.fit file under the data directory.
filenames = []
for root, dirs, files in os.walk(data_path):
    for file in files:
        if file.endswith('.fit'):
            filenames.append(os.path.join(root, file))

max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)

# Open each file and accumulate it into the record power profile.
for idx_file, filename in enumerate(filenames):
    # print() with a single argument behaves identically on Python 2,
    # while the original `print '...'` statement is a SyntaxError on
    # Python 3.
    print('Process file #{} over {}'.format(idx_file + 1, len(filenames)))
    power_ride = load_power_from_fit(filename)
    rpp_rider.fit(power_ride)

# Create the storage directory if it does not exist, then persist.
if not os.path.exists(storage_path):
    os.makedirs(storage_path)
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
Add some verbose to know if we process things
|
Add some verbose to know if we process things
|
Python
|
mit
|
clemaitre58/power-profile,clemaitre58/power-profile,glemaitre/power-profile,glemaitre/power-profile
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for filename in filenames:
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Add some verbose to know if we process things
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for filename in filenames:
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Add some verbose to know if we process things<commit_after>
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for filename in filenames:
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Add some verbose to know if we process thingsimport sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for filename in filenames:
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Add some verbose to know if we process things<commit_after>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponding to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 300
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
5e412494e09d845dcb08529bd9c436f52cdda91b
|
studygroups/migrations/0034_create_facilitators_group.py
|
studygroups/migrations/0034_create_facilitators_group.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


def create_facilitators_group(apps, schema_editor):
    """Ensure the "facilitators" auth group exists.

    get_or_create makes the migration idempotent: it succeeds even when
    the group already exists (created manually or by a prior run),
    whereas an unconditional save() would insert a duplicate row — the
    same fix applied in the later revision of this migration.
    """
    Group = apps.get_model("auth", "Group")
    Group.objects.get_or_create(name="facilitators")


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0033_auto_20150826_1408'),
    ]

    operations = [
        migrations.RunPython(create_facilitators_group),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


def create_facilitators_group(apps, schema_editor):
    """Create the "facilitators" auth group if it does not exist yet."""
    group_model = apps.get_model("auth", "Group")
    group_model.objects.get_or_create(name="facilitators")


class Migration(migrations.Migration):

    dependencies = [('studygroups', '0033_auto_20150826_1408')]

    operations = [migrations.RunPython(create_facilitators_group)]
|
Change data migration to work even if facilitator group already exists
|
Change data migration to work even if facilitator group already exists
|
Python
|
mit
|
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
Change data migration to work even if facilitator group already exists
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
<commit_msg>Change data migration to work even if facilitator group already exists<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
Change data migration to work even if facilitator group already exists# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
group = Group(name="facilitators")
group.save()
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
<commit_msg>Change data migration to work even if facilitator group already exists<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def create_facilitators_group(apps, schema_editor):
Group = apps.get_model("auth", "Group")
Group.objects.get_or_create(name="facilitators")
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0033_auto_20150826_1408'),
]
operations = [
migrations.RunPython(create_facilitators_group),
]
|
a5f2df3a540ac99dea73bc7d1d3c29f70fb13c60
|
tympeg/streamsaver.py
|
tympeg/streamsaver.py
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys


class StreamSaver:
    """Records a network stream to a .ts file by stream-copying with ffmpeg."""

    def __init__(self, input_stream, output_file_path_ts, verbosity=24):
        """Prepare the ffmpeg argument list; nothing runs until run().

        :param input_stream: URL/path of the stream ffmpeg should read.
        :param output_file_path_ts: desired output path; its extension
            is rewritten to .ts for stable writing.
        :param verbosity: ffmpeg ``-v`` log level.
        """
        self.file_writer = None
        self.analyzeduration = 5000000  # ffmpeg default value (milliseconds must be integer)
        self.probesize = 5000000  # ffmpeg default value (bytes must be > 32 and integer)

        directory, file_name = path.split(output_file_path_ts)

        # Force a .ts container for stable writing.  path.splitext only
        # strips the final extension, so file names containing extra
        # dots (e.g. "show.ep1.mkv") no longer break the two-value
        # unpacking that file_name.split('.') performed.
        file_name, ext = path.splitext(file_name)
        file_name += '.ts'

        if not path.isdir(directory):
            mkdir(directory)
        if path.isfile(output_file_path_ts):
            file_name = renameFile(file_name)

        output_file_path_ts = path.join(directory, file_name)
        self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
                     '-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]

    def run(self):
        """Launch the ffmpeg writer process asynchronously."""
        self.file_writer = subprocess.Popen(self.args)

    def quit(self):
        """Ask the ffmpeg writer process to terminate."""
        self.file_writer.terminate()
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile


class StreamSaver:
    """Copies a live stream into a .ts file via an ffmpeg subprocess."""

    def __init__(self, input_stream, output_file_path_ts, verbosity=24):
        """Build the ffmpeg command line; call run() to start recording."""
        self.file_writer = None
        # ffmpeg defaults: analyzeduration in milliseconds (integer),
        # probesize in bytes (> 32, integer).
        self.analyzeduration = 5000000
        self.probesize = 5000000

        directory, file_name = path.split(output_file_path_ts)

        # Rewrite the extension to .ts so an interrupted recording
        # stays readable.  NOTE(review): split('.') assumes exactly one
        # dot in the file name — confirm callers never pass multi-dot
        # names.
        base, ext = file_name.split('.')
        file_name = base + '.ts'

        if not path.isdir(directory):
            mkdir(directory)
        if path.isfile(output_file_path_ts):
            file_name = renameFile(file_name)

        destination = path.join(directory, file_name)
        self.args = ['ffmpeg',
                     '-v', str(verbosity),
                     '-analyzeduration', str(self.analyzeduration),
                     '-probesize', str(self.probesize),
                     '-i', str(input_stream),
                     '-c', 'copy',
                     destination]

    def run(self):
        """Start the ffmpeg writer process."""
        self.file_writer = subprocess.Popen(self.args)

    def quit(self):
        """Terminate the writer process."""
        self.file_writer.terminate()
|
Clean up imports after experiments with signals for quitting
|
Clean up imports after experiments with signals for quitting
|
Python
|
mit
|
taishengy/tympeg
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
Clean up imports after experiments with signals for quitting
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
<commit_before>import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
<commit_msg>Clean up imports after expirements with signals for quitting<commit_after>
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
Clean up imports after expirements with signals for quittingimport subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
<commit_before>import subprocess
from os import path, mkdir
from tympeg.util import renameFile
import platform
import signal
import sys
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
<commit_msg>Clean up imports after expirements with signals for quitting<commit_after>import subprocess
from os import path, mkdir
from tympeg.util import renameFile
class StreamSaver:
def __init__(self, input_stream, output_file_path_ts, verbosity=24):
self.file_writer = None
self.analyzeduration = 5000000 # ffmpeg default value (milliseconds must be integer)
self.probesize = 5000000 # ffmpeg default value (bytes must be > 32 and integer)
directory, file_name = path.split(output_file_path_ts)
# make sure output is .ts file for stable writing
file_name, ext = file_name.split('.')
file_name += '.ts'
if not path.isdir(directory):
mkdir(directory)
if path.isfile(output_file_path_ts):
file_name = renameFile(file_name)
output_file_path_ts = path.join(directory, file_name)
self.args = ['ffmpeg', '-v', str(verbosity), '-analyzeduration', str(self.analyzeduration),
'-probesize', str(self.probesize), '-i', str(input_stream), '-c', 'copy', output_file_path_ts]
def run(self):
self.file_writer = subprocess.Popen(self.args)
def quit(self):
self.file_writer.terminate()
|
74bb8764fbeb65cb4a5b67597f3af4e8c2773794
|
dataportal/replay/core.py
|
dataportal/replay/core.py
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
Add helper function to save state
|
ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable
|
Python
|
bsd-3-clause
|
ericdill/datamuxer,NSLS-II/dataportal,danielballan/datamuxer,NSLS-II/dataportal,ericdill/databroker,ericdill/databroker,ericdill/datamuxer,danielballan/datamuxer,danielballan/dataportal,tacaswell/dataportal,tacaswell/dataportal,NSLS-II/datamuxer,danielballan/dataportal
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
<commit_before>"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
<commit_msg>ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable<commit_after>
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
<commit_before>"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
<commit_msg>ENH: Add helper function to save state
Sanitizes the Atom.__getstate() function for any blacklisted
Atom members that are unimportant for state saving or
non-JSON-serializable<commit_after>"""Module for Enaml widgets that are generally useful"""
from enaml.widgets.api import PushButton, Timer
from atom.api import Typed, observe, Event
from enaml.core.declarative import d_
from enaml.layout.api import (grid, align)
class ProgrammaticButton(PushButton):
clicked = d_(Event(bool), writable=True)
toggled = d_(Event(bool), writable=True)
class TimerButton(ProgrammaticButton):
timer = d_(Typed(Timer))
checkable = True
@observe('checked')
def checked_changed(self, changed):
if self.checked:
self.timer.start()
else:
self.timer.stop()
def generate_grid(container, num_cols):
""" Generate grid constraints with given number of columns.
Notes
-----
Shamelessly copied from enaml/examples/layout/advanced/factory_func.enaml
"""
rows = []
widgets = container.visible_widgets()
row_iters = (iter(widgets),) * num_cols
rows = list(zip(*row_iters))
return [grid(*rows, row_spacing=0, column_spacing=0, row_align='v_center',
column_align='h_center'),
align('width', *widgets)]
non_stateful_attrs = ['history']
def save_state(history, history_key, state):
for key, val in state.items():
if key in non_stateful_attrs:
state[key] = None
history.put(history_key, state)
|
330cdc00e7b0f7cf18d208ea67499f22c82c9ad5
|
lowfat/tests_models.py
|
lowfat/tests_models.py
|
from django.test import TestCase
from .models import fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
|
from django.test import TestCase
from .models import Claimant, fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
class ClaimantSlugTest(TestCase):
def test_same_name(self):
claimant1 = Claimant.objects.create(
forenames='First Person',
surname='Test',
home_city='Testville',
phone=0
)
claimant2 = Claimant.objects.create(
forenames='Second Person',
surname='Test',
home_city='Testville',
phone=0
)
self.assertNotEqual(claimant1.slug, claimant2.slug)
|
Add test for Claimant slug field
|
Add test for Claimant slug field
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
from django.test import TestCase
from .models import fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
Add test for Claimant slug field
|
from django.test import TestCase
from .models import Claimant, fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
class ClaimantSlugTest(TestCase):
def test_same_name(self):
claimant1 = Claimant.objects.create(
forenames='First Person',
surname='Test',
home_city='Testville',
phone=0
)
claimant2 = Claimant.objects.create(
forenames='Second Person',
surname='Test',
home_city='Testville',
phone=0
)
self.assertNotEqual(claimant1.slug, claimant2.slug)
|
<commit_before>from django.test import TestCase
from .models import fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
<commit_msg>Add test for Claimant slug field<commit_after>
|
from django.test import TestCase
from .models import Claimant, fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
class ClaimantSlugTest(TestCase):
def test_same_name(self):
claimant1 = Claimant.objects.create(
forenames='First Person',
surname='Test',
home_city='Testville',
phone=0
)
claimant2 = Claimant.objects.create(
forenames='Second Person',
surname='Test',
home_city='Testville',
phone=0
)
self.assertNotEqual(claimant1.slug, claimant2.slug)
|
from django.test import TestCase
from .models import fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
Add test for Claimant slug fieldfrom django.test import TestCase
from .models import Claimant, fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
class ClaimantSlugTest(TestCase):
def test_same_name(self):
claimant1 = Claimant.objects.create(
forenames='First Person',
surname='Test',
home_city='Testville',
phone=0
)
claimant2 = Claimant.objects.create(
forenames='Second Person',
surname='Test',
home_city='Testville',
phone=0
)
self.assertNotEqual(claimant1.slug, claimant2.slug)
|
<commit_before>from django.test import TestCase
from .models import fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
<commit_msg>Add test for Claimant slug field<commit_after>from django.test import TestCase
from .models import Claimant, fix_url
class FixURLTest(TestCase):
def test_none(self):
url = None
expected_url = None
self.assertEqual(fix_url(url), expected_url)
def test_blank(self):
url = ""
expected_url = ""
self.assertEqual(fix_url(url), expected_url)
def test_without_protocol(self):
url = "software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_http(self):
url = "http://software.ac.uk"
expected_url = "http://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
def test_with_https(self):
url = "https://software.ac.uk"
expected_url = "https://software.ac.uk"
self.assertEqual(fix_url(url), expected_url)
class ClaimantSlugTest(TestCase):
def test_same_name(self):
claimant1 = Claimant.objects.create(
forenames='First Person',
surname='Test',
home_city='Testville',
phone=0
)
claimant2 = Claimant.objects.create(
forenames='Second Person',
surname='Test',
home_city='Testville',
phone=0
)
self.assertNotEqual(claimant1.slug, claimant2.slug)
|
db30c55c9949db63ffdee604f58130d33ce7c922
|
cumulusci/core/keychain/__init__.py
|
cumulusci/core/keychain/__init__.py
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
BaseProjectKeychain,
DEFAULT_CONNECTED_APP,
BaseEncryptedProjectKeychain,
EnvironmentProjectKeychain,
EncryptedFileProjectKeychain,
)
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
"BaseProjectKeychain",
"DEFAULT_CONNECTED_APP",
"BaseEncryptedProjectKeychain",
"EnvironmentProjectKeychain",
"EncryptedFileProjectKeychain",
)
|
Use __all__ to shut up Flake8
|
Use __all__ to shut up Flake8
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
BaseProjectKeychain,
DEFAULT_CONNECTED_APP,
BaseEncryptedProjectKeychain,
EnvironmentProjectKeychain,
EncryptedFileProjectKeychain,
)
Use __all__ to shut up Flake8
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
"BaseProjectKeychain",
"DEFAULT_CONNECTED_APP",
"BaseEncryptedProjectKeychain",
"EnvironmentProjectKeychain",
"EncryptedFileProjectKeychain",
)
|
<commit_before># IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
BaseProjectKeychain,
DEFAULT_CONNECTED_APP,
BaseEncryptedProjectKeychain,
EnvironmentProjectKeychain,
EncryptedFileProjectKeychain,
)
<commit_msg>Use __all__ to shut up Flake8<commit_after>
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
"BaseProjectKeychain",
"DEFAULT_CONNECTED_APP",
"BaseEncryptedProjectKeychain",
"EnvironmentProjectKeychain",
"EncryptedFileProjectKeychain",
)
|
# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
BaseProjectKeychain,
DEFAULT_CONNECTED_APP,
BaseEncryptedProjectKeychain,
EnvironmentProjectKeychain,
EncryptedFileProjectKeychain,
)
Use __all__ to shut up Flake8# IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
"BaseProjectKeychain",
"DEFAULT_CONNECTED_APP",
"BaseEncryptedProjectKeychain",
"EnvironmentProjectKeychain",
"EncryptedFileProjectKeychain",
)
|
<commit_before># IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
BaseProjectKeychain,
DEFAULT_CONNECTED_APP,
BaseEncryptedProjectKeychain,
EnvironmentProjectKeychain,
EncryptedFileProjectKeychain,
)
<commit_msg>Use __all__ to shut up Flake8<commit_after># IMPORT ORDER MATTERS!
# inherit from BaseConfig
from cumulusci.core.keychain.BaseProjectKeychain import BaseProjectKeychain
from cumulusci.core.keychain.BaseProjectKeychain import DEFAULT_CONNECTED_APP
# inherit from BaseProjectKeychain
from cumulusci.core.keychain.BaseEncryptedProjectKeychain import (
BaseEncryptedProjectKeychain,
)
from cumulusci.core.keychain.EnvironmentProjectKeychain import (
EnvironmentProjectKeychain,
)
# inherit from BaseEncryptedProjectKeychain
from cumulusci.core.keychain.encrypted_file_project_keychain import (
EncryptedFileProjectKeychain,
)
__all__ = (
"BaseProjectKeychain",
"DEFAULT_CONNECTED_APP",
"BaseEncryptedProjectKeychain",
"EnvironmentProjectKeychain",
"EncryptedFileProjectKeychain",
)
|
963164d60bf9295233cf8050c6499a500f7c4ce7
|
benchmarks/bench_skan.py
|
benchmarks/bench_skan.py
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load('infected3.npz')['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
Use benchmark directory to load skeleton
|
Use benchmark directory to load skeleton
|
Python
|
bsd-3-clause
|
jni/skan
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load('infected3.npz')['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
Use benchmark directory to load skeleton
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
<commit_before>import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load('infected3.npz')['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
<commit_msg>Use benchmark directory to load skeleton<commit_after>
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load('infected3.npz')['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
Use benchmark directory to load skeletonimport os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
<commit_before>import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load('infected3.npz')['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
<commit_msg>Use benchmark directory to load skeleton<commit_after>import os
from contextlib import contextmanager
from collections import OrderedDict
from time import process_time
import numpy as np
from skan import csr
rundir = os.path.dirname(__file__)
@contextmanager
def timer():
time = []
t0 = process_time()
yield time
t1 = process_time()
time.append(t1 - t0)
def bench_suite():
times = OrderedDict()
skeleton = np.load(os.path.join(rundir, 'infected3.npz'))['skeleton']
with timer() as t_build_graph:
g, indices, degrees = csr.skeleton_to_csgraph(skeleton,
spacing=2.24826)
times['build graph'] = t_build_graph[0]
with timer() as t_stats:
stats = csr.branch_statistics(g, indices, degrees)
times['compute statistics'] = t_stats[0]
with timer() as t_summary:
summary = csr.summarise(skeleton)
times['compute per-skeleton statistics'] = t_summary[0]
return times
def print_bench_results(times=None, memory=None):
if times is not None:
print('Timing results:')
for key in times:
print('--- ', key, '%.3f s' % times[key])
if memory is not None:
print('Memory results:')
for key in memory:
print('--- ', key, '%.3f MB' % (memory[key] / 1e6))
if __name__ == '__main__':
times = bench_suite()
print_bench_results(times)
|
d3073f7d42f670a46018d82b52909618a3cc6619
|
lib/python2.5/aquilon/server/commands/add_manager_manager.py
|
lib/python2.5/aquilon/server/commands/add_manager_manager.py
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname", "ip"]
#if __name__=='__main__':
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname"]
#if __name__=='__main__':
|
Fix to allow dynamic ip parameters to work correctly.
|
add_manager: Fix to allow dynamic ip parameters to work correctly.
Addresses-Issue: Jira/AQUILONAQD-163
When the dynamic ip parameters were added to the commands beyond
add_host that took --ip, the required_parameters array for
add_manager_manager was not updated. The command thought that
the --ip parameter was required when it no longer is (since the
ip can now be chosen via the dynamic parameters).
|
Python
|
apache-2.0
|
quattor/aquilon,stdweird/aquilon,guillaume-philippon/aquilon,quattor/aquilon,guillaume-philippon/aquilon,guillaume-philippon/aquilon,stdweird/aquilon,stdweird/aquilon,quattor/aquilon
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname", "ip"]
#if __name__=='__main__':
add_manager: Fix to allow dynamic ip parameters to work correctly.
Addresses-Issue: Jira/AQUILONAQD-163
When the dynamic ip parameters were added to the commands beyond
add_host that took --ip, the required_parameters array for
add_manager_manager was not updated. The command thought that
the --ip parameter was required when it no longer is (since the
ip can now be chosen via the dynamic parameters).
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname"]
#if __name__=='__main__':
|
<commit_before>#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname", "ip"]
#if __name__=='__main__':
<commit_msg>add_manager: Fix to allow dynamic ip parameters to work correctly.
Addresses-Issue: Jira/AQUILONAQD-163
When the dynamic ip parameters were added to the commands beyond
add_host that took --ip, the required_parameters array for
add_manager_manager was not updated. The command thought that
the --ip parameter was required when it no longer is (since the
ip can now be chosen via the dynamic parameters).<commit_after>
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname"]
#if __name__=='__main__':
|
#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname", "ip"]
#if __name__=='__main__':
add_manager: Fix to allow dynamic ip parameters to work correctly.
Addresses-Issue: Jira/AQUILONAQD-163
When the dynamic ip parameters were added to the commands beyond
add_host that took --ip, the required_parameters array for
add_manager_manager was not updated. The command thought that
the --ip parameter was required when it no longer is (since the
ip can now be chosen via the dynamic parameters).#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname"]
#if __name__=='__main__':
|
<commit_before>#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname", "ip"]
#if __name__=='__main__':
<commit_msg>add_manager: Fix to allow dynamic ip parameters to work correctly.
Addresses-Issue: Jira/AQUILONAQD-163
When the dynamic ip parameters were added to the commands beyond
add_host that took --ip, the required_parameters array for
add_manager_manager was not updated. The command thought that
the --ip parameter was required when it no longer is (since the
ip can now be chosen via the dynamic parameters).<commit_after>#!/ms/dist/python/PROJ/core/2.5.0/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Contains a wrapper for `aq add manager --manager`."""
from aquilon.server.broker import BrokerCommand
from aquilon.server.commands.add_manager import CommandAddManager
class CommandAddManagerManager(CommandAddManager):
""" CommandAddManager already has all the necessary logic to
handle the extra instance parameter.
"""
required_parameters = ["manager", "hostname"]
#if __name__=='__main__':
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.