commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4336a5d3eaf5500a6f3041b30c7887361dea5737
|
tests/test_formatting.py
|
tests/test_formatting.py
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
def test_wrapping_long_options_strings(runner):
@click.group()
def cli():
"""Top level command
"""
@cli.group()
def a_very_long():
"""Second level
"""
@a_very_long.command()
@click.argument('first')
@click.argument('second')
@click.argument('third')
@click.argument('fourth')
def command():
"""A command.
"""
result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
terminal_width=54)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
' THIRD FOURTH',
'',
' A command.',
'',
'Options:',
' --help Show this message and exit.',
]
|
Add failing test for formatting
|
Add failing test for formatting
|
Python
|
bsd-3-clause
|
her0e1c1/click,MakerDAO/click,Akasurde/click,scalp42/click,khwilson/click,polinom/click,amjith/click,hellodk/click,jvrsantacruz/click,naoyat/click,lucius-feng/click,dastergon/click,TomRegan/click,hackebrot/click,cbandera/click,oss6/click,GeoffColburn/click,willingc/click,pallets/click,pgkelley4/click,glorizen/click,mitsuhiko/click,ternus/click,andela-ooladayo/click,hugopeixoto/click,gambogi/click,ma-ric/click,pombredanne/click,Nudies/click,nchammas/click
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
Add failing test for formatting
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
def test_wrapping_long_options_strings(runner):
@click.group()
def cli():
"""Top level command
"""
@cli.group()
def a_very_long():
"""Second level
"""
@a_very_long.command()
@click.argument('first')
@click.argument('second')
@click.argument('third')
@click.argument('fourth')
def command():
"""A command.
"""
result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
terminal_width=54)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
' THIRD FOURTH',
'',
' A command.',
'',
'Options:',
' --help Show this message and exit.',
]
|
<commit_before># -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
<commit_msg>Add failing test for formatting<commit_after>
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
def test_wrapping_long_options_strings(runner):
@click.group()
def cli():
"""Top level command
"""
@cli.group()
def a_very_long():
"""Second level
"""
@a_very_long.command()
@click.argument('first')
@click.argument('second')
@click.argument('third')
@click.argument('fourth')
def command():
"""A command.
"""
result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
terminal_width=54)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
' THIRD FOURTH',
'',
' A command.',
'',
'Options:',
' --help Show this message and exit.',
]
|
# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
Add failing test for formatting# -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
def test_wrapping_long_options_strings(runner):
@click.group()
def cli():
"""Top level command
"""
@cli.group()
def a_very_long():
"""Second level
"""
@a_very_long.command()
@click.argument('first')
@click.argument('second')
@click.argument('third')
@click.argument('fourth')
def command():
"""A command.
"""
result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
terminal_width=54)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
' THIRD FOURTH',
'',
' A command.',
'',
'Options:',
' --help Show this message and exit.',
]
|
<commit_before># -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
<commit_msg>Add failing test for formatting<commit_after># -*- coding: utf-8 -*-
import click
def test_basic_functionality(runner):
@click.command()
def cli():
"""First paragraph.
This is a very long second
paragraph and not correctly
wrapped but it will be rewrapped.
\b
This is
a paragraph
without rewrapping.
\b
1
2
3
And this is a paragraph
that will be rewrapped again.
"""
result = runner.invoke(cli, ['--help'], terminal_width=60)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli [OPTIONS]',
'',
' First paragraph.',
'',
' This is a very long second paragraph and not correctly',
' wrapped but it will be rewrapped.',
'',
' This is',
' a paragraph',
' without rewrapping.',
'',
' 1',
' 2',
' 3',
'',
' And this is a paragraph that will be rewrapped again.',
'',
'Options:',
' --help Show this message and exit.',
]
def test_wrapping_long_options_strings(runner):
@click.group()
def cli():
"""Top level command
"""
@cli.group()
def a_very_long():
"""Second level
"""
@a_very_long.command()
@click.argument('first')
@click.argument('second')
@click.argument('third')
@click.argument('fourth')
def command():
"""A command.
"""
result = runner.invoke(cli, ['a_very_long', 'command', '--help'],
terminal_width=54)
assert not result.exception
assert result.output.splitlines() == [
'Usage: cli a_very_long command [OPTIONS] FIRST SECOND',
' THIRD FOURTH',
'',
' A command.',
'',
'Options:',
' --help Show this message and exit.',
]
|
cb2dec164b18dc671dabdafe221bfd6aac4fc01e
|
onitu/drivers/local_storage/tests/driver.py
|
onitu/drivers/local_storage/tests/driver.py
|
import os
import sh
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
return sh.mkdir('-p', self.root / subdirs)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
(self.root / subdirs).makedirs_p()
# Give some time to inotify in order
# to avoid a known bug where new files
# if a recently created directory are
# ignored
# cf http://stackoverflow.com/a/17586891/180751
import time
time.sleep(0.1)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
Remove dependency to sh in the tests
|
LocalStorage: Remove dependency to sh in the tests
|
Python
|
mit
|
onitu/onitu,onitu/onitu,onitu/onitu
|
import os
import sh
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
return sh.mkdir('-p', self.root / subdirs)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
LocalStorage: Remove dependency to sh in the tests
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
(self.root / subdirs).makedirs_p()
# Give some time to inotify in order
# to avoid a known bug where new files
# if a recently created directory are
# ignored
# cf http://stackoverflow.com/a/17586891/180751
import time
time.sleep(0.1)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
<commit_before>import os
import sh
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
return sh.mkdir('-p', self.root / subdirs)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
<commit_msg>LocalStorage: Remove dependency to sh in the tests<commit_after>
|
import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
(self.root / subdirs).makedirs_p()
# Give some time to inotify in order
# to avoid a known bug where new files
# if a recently created directory are
# ignored
# cf http://stackoverflow.com/a/17586891/180751
import time
time.sleep(0.1)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
import os
import sh
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
return sh.mkdir('-p', self.root / subdirs)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
LocalStorage: Remove dependency to sh in the testsimport os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
(self.root / subdirs).makedirs_p()
# Give some time to inotify in order
# to avoid a known bug where new files
# if a recently created directory are
# ignored
# cf http://stackoverflow.com/a/17586891/180751
import time
time.sleep(0.1)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
<commit_before>import os
import sh
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
return sh.mkdir('-p', self.root / subdirs)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
<commit_msg>LocalStorage: Remove dependency to sh in the tests<commit_after>import os
from path import path
from tests.utils.testdriver import TestDriver
from tests.utils import files
from tests.utils.tempdirs import dirs
class Driver(TestDriver):
SPEED_BUMP = 1
def __init__(self, *args, **options):
if 'root' not in options:
options['root'] = dirs.create()
super(Driver, self).__init__('local_storage',
*args,
**options)
@property
def root(self):
return path(self.options['root'])
def close(self):
dirs.delete(self.root)
def mkdir(self, subdirs):
(self.root / subdirs).makedirs_p()
# Give some time to inotify in order
# to avoid a known bug where new files
# if a recently created directory are
# ignored
# cf http://stackoverflow.com/a/17586891/180751
import time
time.sleep(0.1)
def write(self, filename, content):
with open(self.root / filename, 'w+') as f:
f.write(content)
def generate(self, filename, size):
return files.generate(self.root / filename, size)
def exists(self, filename):
return os.path.exists(self.root / filename)
def unlink(self, filename):
return os.unlink(self.root / filename)
def checksum(self, filename):
return files.checksum(self.root / filename)
|
219f67e3e15c548b81211b0baff475621f66a7fa
|
scripts/dbutil/compute_asos_sts.py
|
scripts/dbutil/compute_asos_sts.py
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import sys
sys.path.insert(0, '../lib')
import db, network
asos = db.connect('asos')
mesosite = db.connect('mesosite')
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
rs = asos.query("SELECT station, min(valid) from alldata WHERE station in %s GROUP by station ORDER by min ASC" % (ids,)).dictresult()
for i in range(len(rs)):
print rs[i], table.sts[rs[i]['station']]['archive_begin']
sql = "UPDATE stations SET archive_begin = '%s' WHERE id = '%s' and network = '%s'" % (
rs[i]['min'], rs[i]['station'], net)
mesosite.query( sql )
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import iemdb, network, sys
asos = iemdb.connect('asos', bypass=True)
acursor = asos.cursor()
mesosite = iemdb.connect('mesosite')
mcursor = mesosite.cursor()
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
acursor.execute("""SELECT station, min(valid) from alldata
WHERE station in %s GROUP by station
ORDER by min ASC""" % (ids,))
for row in acursor:
station = row[0]
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, net) )
mcursor.close()
mesosite.commit()
mesosite.close()
|
Make the output less noisey, more informative
|
Make the output less noisey, more informative
|
Python
|
mit
|
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import sys
sys.path.insert(0, '../lib')
import db, network
asos = db.connect('asos')
mesosite = db.connect('mesosite')
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
rs = asos.query("SELECT station, min(valid) from alldata WHERE station in %s GROUP by station ORDER by min ASC" % (ids,)).dictresult()
for i in range(len(rs)):
print rs[i], table.sts[rs[i]['station']]['archive_begin']
sql = "UPDATE stations SET archive_begin = '%s' WHERE id = '%s' and network = '%s'" % (
rs[i]['min'], rs[i]['station'], net)
mesosite.query( sql )
Make the output less noisey, more informative
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import iemdb, network, sys
asos = iemdb.connect('asos', bypass=True)
acursor = asos.cursor()
mesosite = iemdb.connect('mesosite')
mcursor = mesosite.cursor()
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
acursor.execute("""SELECT station, min(valid) from alldata
WHERE station in %s GROUP by station
ORDER by min ASC""" % (ids,))
for row in acursor:
station = row[0]
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, net) )
mcursor.close()
mesosite.commit()
mesosite.close()
|
<commit_before># Look into the ASOS database and figure out the start time of various
# sites for a given network.
import sys
sys.path.insert(0, '../lib')
import db, network
asos = db.connect('asos')
mesosite = db.connect('mesosite')
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
rs = asos.query("SELECT station, min(valid) from alldata WHERE station in %s GROUP by station ORDER by min ASC" % (ids,)).dictresult()
for i in range(len(rs)):
print rs[i], table.sts[rs[i]['station']]['archive_begin']
sql = "UPDATE stations SET archive_begin = '%s' WHERE id = '%s' and network = '%s'" % (
rs[i]['min'], rs[i]['station'], net)
mesosite.query( sql )
<commit_msg>Make the output less noisey, more informative<commit_after>
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import iemdb, network, sys
asos = iemdb.connect('asos', bypass=True)
acursor = asos.cursor()
mesosite = iemdb.connect('mesosite')
mcursor = mesosite.cursor()
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
acursor.execute("""SELECT station, min(valid) from alldata
WHERE station in %s GROUP by station
ORDER by min ASC""" % (ids,))
for row in acursor:
station = row[0]
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, net) )
mcursor.close()
mesosite.commit()
mesosite.close()
|
# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import sys
sys.path.insert(0, '../lib')
import db, network
asos = db.connect('asos')
mesosite = db.connect('mesosite')
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
rs = asos.query("SELECT station, min(valid) from alldata WHERE station in %s GROUP by station ORDER by min ASC" % (ids,)).dictresult()
for i in range(len(rs)):
print rs[i], table.sts[rs[i]['station']]['archive_begin']
sql = "UPDATE stations SET archive_begin = '%s' WHERE id = '%s' and network = '%s'" % (
rs[i]['min'], rs[i]['station'], net)
mesosite.query( sql )
Make the output less noisey, more informative# Look into the ASOS database and figure out the start time of various
# sites for a given network.
import iemdb, network, sys
asos = iemdb.connect('asos', bypass=True)
acursor = asos.cursor()
mesosite = iemdb.connect('mesosite')
mcursor = mesosite.cursor()
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
acursor.execute("""SELECT station, min(valid) from alldata
WHERE station in %s GROUP by station
ORDER by min ASC""" % (ids,))
for row in acursor:
station = row[0]
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, net) )
mcursor.close()
mesosite.commit()
mesosite.close()
|
<commit_before># Look into the ASOS database and figure out the start time of various
# sites for a given network.
import sys
sys.path.insert(0, '../lib')
import db, network
asos = db.connect('asos')
mesosite = db.connect('mesosite')
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
rs = asos.query("SELECT station, min(valid) from alldata WHERE station in %s GROUP by station ORDER by min ASC" % (ids,)).dictresult()
for i in range(len(rs)):
print rs[i], table.sts[rs[i]['station']]['archive_begin']
sql = "UPDATE stations SET archive_begin = '%s' WHERE id = '%s' and network = '%s'" % (
rs[i]['min'], rs[i]['station'], net)
mesosite.query( sql )
<commit_msg>Make the output less noisey, more informative<commit_after># Look into the ASOS database and figure out the start time of various
# sites for a given network.
import iemdb, network, sys
asos = iemdb.connect('asos', bypass=True)
acursor = asos.cursor()
mesosite = iemdb.connect('mesosite')
mcursor = mesosite.cursor()
net = sys.argv[1]
table = network.Table( net )
ids = `tuple(table.sts.keys())`
acursor.execute("""SELECT station, min(valid) from alldata
WHERE station in %s GROUP by station
ORDER by min ASC""" % (ids,))
for row in acursor:
station = row[0]
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, net) )
mcursor.close()
mesosite.commit()
mesosite.close()
|
9ea29573841307ffe24b597dd8d1e0b783f81a2a
|
tests/app/views/test_application.py
|
tests/app/views/test_application.py
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_should_have_analytics_on_page(self):
res = self.client.get('/')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
Correct test to point at application.js
|
Correct test to point at application.js
The JS to search was previously in the page rather
than concatenated into the main JavaScript file.
|
Python
|
mit
|
alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,mtekel/digitalmarketplace-buyer-frontend
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_should_have_analytics_on_page(self):
res = self.client.get('/')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
Correct test to point at application.js
The JS to search was previously in the page rather
than concatenated into the main JavaScript file.
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
<commit_before>import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_should_have_analytics_on_page(self):
res = self.client.get('/')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
<commit_msg>Correct test to point at application.js
The JS to search was previously in the page rather
than concatenated into the main JavaScript file.<commit_after>
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_should_have_analytics_on_page(self):
res = self.client.get('/')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
Correct test to point at application.js
The JS to search was previously in the page rather
than concatenated into the main JavaScript file.import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
<commit_before>import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_should_have_analytics_on_page(self):
res = self.client.get('/')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
<commit_msg>Correct test to point at application.js
The JS to search was previously in the page rather
than concatenated into the main JavaScript file.<commit_after>import mock
from nose.tools import assert_equal, assert_true
from ...helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def setup(self):
super(TestApplication, self).setup()
def test_analytics_code_should_be_in_javascript(self):
res = self.client.get('/static/javascripts/application.js')
assert_equal(200, res.status_code)
assert_true(
'GOVUK.analytics.trackPageview'
in res.get_data(as_text=True))
|
e0adf4df50dcb366e7977f46e1f09ca04dd48cf2
|
blockbuster/bb_logging.py
|
blockbuster/bb_logging.py
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=5)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s: %(message)s - %(name)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
Update log output so that it works more nicely with ELK
|
Update log output so that it works more nicely with ELK
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=5)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s: %(message)s - %(name)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Update log output so that it works more nicely with ELK
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=5)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s: %(message)s - %(name)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Update log output so that it works more nicely with ELK<commit_after>
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=5)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s: %(message)s - %(name)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Update log output so that it works more nicely with ELKimport logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=5)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s: %(message)s - %(name)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Update log output so that it works more nicely with ELK<commit_after>import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
f7a1a849161007e3703b41758e99dd45609c9753
|
renovation_tax_be/models/account_invoice.py
|
renovation_tax_be/models/account_invoice.py
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
line._onchange_product_id()
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
price = line.unit_price
line._onchange_product_id()
line.unit_price = price
|
Fix loss of unit price if edited
|
Fix loss of unit price if edited
|
Python
|
agpl-3.0
|
Somko/Odoo-Public,Somko/Odoo-Public
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
line._onchange_product_id()
Fix loss of unit price if edited
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
price = line.unit_price
line._onchange_product_id()
line.unit_price = price
|
<commit_before>from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
line._onchange_product_id()
<commit_msg>Fix loss of unit price if edited<commit_after>
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
price = line.unit_price
line._onchange_product_id()
line.unit_price = price
|
from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
line._onchange_product_id()
Fix loss of unit price if editedfrom odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
price = line.unit_price
line._onchange_product_id()
line.unit_price = price
|
<commit_before>from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
line._onchange_product_id()
<commit_msg>Fix loss of unit price if edited<commit_after>from odoo import models, api
class AccountInvoice(models.Model):
_inherit = "account.invoice"
@api.onchange('fiscal_position_id')
def somko_update_tax(self):
for line in self.invoice_line_ids:
price = line.unit_price
line._onchange_product_id()
line.unit_price = price
|
a9e9705e6963569cb0c88135ce539320aef77ed6
|
examples/explorer/settings.py
|
examples/explorer/settings.py
|
import os
from rororo import GET, static
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
import os
from rororo import GET
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
Remove unnecessary import in explorer example.
|
Remove unnecessary import in explorer example.
|
Python
|
bsd-3-clause
|
playpauseandstop/rororo,playpauseandstop/rororo
|
import os
from rororo import GET, static
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
Remove unnecessary import in explorer example.
|
import os
from rororo import GET
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
<commit_before>import os
from rororo import GET, static
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
<commit_msg>Remove unnecessary import in explorer example.<commit_after>
|
import os
from rororo import GET
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
import os
from rororo import GET, static
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
Remove unnecessary import in explorer example.import os
from rororo import GET
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
<commit_before>import os
from rororo import GET, static
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
<commit_msg>Remove unnecessary import in explorer example.<commit_after>import os
from rororo import GET
# Debug settings
DEBUG = True
# Explorer settings
ROOT_DIR = os.path.expanduser('~')
SHOW_HIDDEN_ITEMS = True
# List of available routes
ROUTES = ('',
GET('/{path:path}', 'views.explorer', name='explorer',
renderer='explorer.html'),
)
|
c22c7a63c85b52c4e05ac0fe6a9f05960705872b
|
tests/test_application.py
|
tests/test_application.py
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation():
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation(capsys):
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
out, err = capsys.readouterr()
assert "usage: warehouse" in out
assert not err
|
Test the cli instantiation a bit better
|
Test the cli instantiation a bit better
|
Python
|
apache-2.0
|
techtonik/warehouse,robhudson/warehouse,mattrobenolt/warehouse,robhudson/warehouse,techtonik/warehouse,mattrobenolt/warehouse,mattrobenolt/warehouse
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation():
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
Test the cli instantiation a bit better
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation(capsys):
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
out, err = capsys.readouterr()
assert "usage: warehouse" in out
assert not err
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation():
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
<commit_msg>Test the cli instantiation a bit better<commit_after>
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation(capsys):
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
out, err = capsys.readouterr()
assert "usage: warehouse" in out
assert not err
|
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation():
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
Test the cli instantiation a bit better# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation(capsys):
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
out, err = capsys.readouterr()
assert "usage: warehouse" in out
assert not err
|
<commit_before># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation():
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
<commit_msg>Test the cli instantiation a bit better<commit_after># Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
import os.path
import pytest
from warehouse.application import Warehouse
def test_basic_instantiation():
Warehouse({
"debug": False,
"database": {
"url": "postgres:///test_warehouse",
}
})
def test_yaml_instantiation():
Warehouse.from_yaml(
os.path.abspath(os.path.join(
os.path.dirname(__file__),
"test_config.yml",
)),
)
def test_cli_instantiation(capsys):
with pytest.raises(SystemExit):
Warehouse.from_cli(["-h"])
out, err = capsys.readouterr()
assert "usage: warehouse" in out
assert not err
|
84c4aa73e6792dad6853866c66c756073df71f27
|
tests/test_replace_all.py
|
tests/test_replace_all.py
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
def test_block_with_two_args_in_one_line(self):
source = """[aSet enumerateObjectsUsingBlock:^(id obj, BOOL *stop){ obj = nil; } ];"""
expected = """aSet.enumerateObjectsUsingBlock(->|obj,stop|{ obj = nil } )"""
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
Test for block with multi args
|
Test for block with multi args
|
Python
|
mit
|
kyamaguchi/SublimeObjC2RubyMotion,kyamaguchi/SublimeObjC2RubyMotion
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
Test for block with multi args
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
def test_block_with_two_args_in_one_line(self):
source = """[aSet enumerateObjectsUsingBlock:^(id obj, BOOL *stop){ obj = nil; } ];"""
expected = """aSet.enumerateObjectsUsingBlock(->|obj,stop|{ obj = nil } )"""
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
<commit_msg>Test for block with multi args<commit_after>
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
def test_block_with_two_args_in_one_line(self):
source = """[aSet enumerateObjectsUsingBlock:^(id obj, BOOL *stop){ obj = nil; } ];"""
expected = """aSet.enumerateObjectsUsingBlock(->|obj,stop|{ obj = nil } )"""
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
Test for block with multi argsimport unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
def test_block_with_two_args_in_one_line(self):
source = """[aSet enumerateObjectsUsingBlock:^(id obj, BOOL *stop){ obj = nil; } ];"""
expected = """aSet.enumerateObjectsUsingBlock(->|obj,stop|{ obj = nil } )"""
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
<commit_msg>Test for block with multi args<commit_after>import unittest, os, sys
from custom_test_case import CustomTestCase
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestReplaceAll(unittest.TestCase, CustomTestCase):
# All replacement
def test_replace_objc(self):
source = 'UIWindow* aWindow = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];'
expected = 'aWindow = UIWindow.alloc.initWithFrame(UIScreen.mainScreen.bounds)'
self.assertSentence(CodeConverter(source).result(), expected)
def test_block_with_two_args_in_one_line(self):
source = """[aSet enumerateObjectsUsingBlock:^(id obj, BOOL *stop){ obj = nil; } ];"""
expected = """aSet.enumerateObjectsUsingBlock(->|obj,stop|{ obj = nil } )"""
self.assertSentence(CodeConverter(source).result(), expected)
if __name__ == '__main__':
unittest.main()
|
a47b5506476f9d0e4dbb2eb24cd22da61f42eb65
|
bixi/api.py
|
bixi/api.py
|
from tastypie.resources import ModelResource
from models import Station
class StationResource(ModelResource):
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
from tastypie.resources import ModelResource
from models import Station, Update
class StationResource(ModelResource):
def dehydrate(self, bundle):
update = Update.objects.filter(station__id=bundle.data['id']).latest()
bundle.data['nb_bikes'] = update.nb_bikes
bundle.data['nb_empty_docks'] = update.nb_empty_docks
return bundle
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
Include the number of available bikes and docks from the latest update.
|
Include the number of available bikes and docks from the latest update.
|
Python
|
bsd-3-clause
|
flebel/django-bixi
|
from tastypie.resources import ModelResource
from models import Station
class StationResource(ModelResource):
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
Include the number of available bikes and docks from the latest update.
|
from tastypie.resources import ModelResource
from models import Station, Update
class StationResource(ModelResource):
def dehydrate(self, bundle):
update = Update.objects.filter(station__id=bundle.data['id']).latest()
bundle.data['nb_bikes'] = update.nb_bikes
bundle.data['nb_empty_docks'] = update.nb_empty_docks
return bundle
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
<commit_before>from tastypie.resources import ModelResource
from models import Station
class StationResource(ModelResource):
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
<commit_msg>Include the number of available bikes and docks from the latest update.<commit_after>
|
from tastypie.resources import ModelResource
from models import Station, Update
class StationResource(ModelResource):
def dehydrate(self, bundle):
update = Update.objects.filter(station__id=bundle.data['id']).latest()
bundle.data['nb_bikes'] = update.nb_bikes
bundle.data['nb_empty_docks'] = update.nb_empty_docks
return bundle
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
from tastypie.resources import ModelResource
from models import Station
class StationResource(ModelResource):
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
Include the number of available bikes and docks from the latest update.from tastypie.resources import ModelResource
from models import Station, Update
class StationResource(ModelResource):
def dehydrate(self, bundle):
update = Update.objects.filter(station__id=bundle.data['id']).latest()
bundle.data['nb_bikes'] = update.nb_bikes
bundle.data['nb_empty_docks'] = update.nb_empty_docks
return bundle
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
<commit_before>from tastypie.resources import ModelResource
from models import Station
class StationResource(ModelResource):
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
<commit_msg>Include the number of available bikes and docks from the latest update.<commit_after>from tastypie.resources import ModelResource
from models import Station, Update
class StationResource(ModelResource):
def dehydrate(self, bundle):
update = Update.objects.filter(station__id=bundle.data['id']).latest()
bundle.data['nb_bikes'] = update.nb_bikes
bundle.data['nb_empty_docks'] = update.nb_empty_docks
return bundle
class Meta:
allowed_methods = ['get']
queryset = Station.objects.all()
resource_name = 'station'
|
5fc0854f54f2946c2b38a8b3c03a553c8a838aed
|
shale/webdriver.py
|
shale/webdriver.py
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
#desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
if session_id is not None:
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
if session_id is not None:
self.command_executor = command_executor
try:
string_type = basestring
except:
string_type = str
if isinstance(self.command_executor, (string_type, bytes)):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
Fix a string type-checking bug.
|
Fix a string type-checking bug.
|
Python
|
mit
|
cardforcoin/shale,mhluongo/shale,mhluongo/shale,cardforcoin/shale
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
#desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
if session_id is not None:
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
Fix a string type-checking bug.
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
if session_id is not None:
self.command_executor = command_executor
try:
string_type = basestring
except:
string_type = str
if isinstance(self.command_executor, (string_type, bytes)):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
<commit_before>from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
#desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
if session_id is not None:
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
<commit_msg>Fix a string type-checking bug.<commit_after>
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
if session_id is not None:
self.command_executor = command_executor
try:
string_type = basestring
except:
string_type = str
if isinstance(self.command_executor, (string_type, bytes)):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
#desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
if session_id is not None:
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
Fix a string type-checking bug.from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
if session_id is not None:
self.command_executor = command_executor
try:
string_type = basestring
except:
string_type = str
if isinstance(self.command_executor, (string_type, bytes)):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
<commit_before>from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
#desired_capabilities=None, browser_profile=None, proxy=None, keep_alive=False):
if session_id is not None:
self.command_executor = command_executor
if type(self.command_executor) is bytes or isinstance(self.command_executor, str):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
<commit_msg>Fix a string type-checking bug.<commit_after>from selenium import webdriver
from selenium.webdriver.remote.switch_to import SwitchTo
from selenium.webdriver.remote.mobile import Mobile
from selenium.webdriver.remote.errorhandler import ErrorHandler
from selenium.webdriver.remote.remote_connection import RemoteConnection
class ResumableRemote(webdriver.Remote):
def __init__(self, command_executor='http://127.0.0.1:4444/wd/hub',
session_id=None, **kwargs):
if session_id is not None:
self.command_executor = command_executor
try:
string_type = basestring
except:
string_type = str
if isinstance(self.command_executor, (string_type, bytes)):
self.command_executor = RemoteConnection(
command_executor, keep_alive=kwargs.get('keep_alive', False))
self.command_executor._commands['get_session'] = ('GET', '/session/$sessionId')
self._is_remote = True
self.start_client()
self.resume_session(session_id)
self._switch_to = SwitchTo(self)
self._mobile = Mobile(self)
self.error_handler = ErrorHandler()
else:
super(ResumableRemote, self).__init__(
command_executor=command_executor, **kwargs)
def resume_session(self, session_id):
self.session_id = session_id
response = self.command_executor.execute('get_session', {'sessionId': session_id})
self.capabilities = response['value']
|
6c999c654e1fffe067592067bd4314cff011cda5
|
permuta/math/counting.py
|
permuta/math/counting.py
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//n+1
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//(n+1)
|
Fix catalan since me is dumb
|
Fix catalan since me is dumb
|
Python
|
bsd-3-clause
|
PermutaTriangle/Permuta
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//n+1
Fix catalan since me is dumb
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//(n+1)
|
<commit_before>
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//n+1
<commit_msg>Fix catalan since me is dumb<commit_after>
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//(n+1)
|
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//n+1
Fix catalan since me is dumb
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//(n+1)
|
<commit_before>
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//n+1
<commit_msg>Fix catalan since me is dumb<commit_after>
def factorial(n):
res = 1
for i in range(2, n+1):
res *= i
return res
def binomial(n,k):
if k > n:
return 0
if n-k < k:
k = n-k
res = 1
for i in range(1,k+1):
res = res * (n - (k - i)) // i
return res
def catalan(n):
return binomial(2*n,n)//(n+1)
|
37740e4b965a59fc1508b897d791900017daae42
|
PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py
|
PublicWebServicesAPI_AND_servercommandScripts/addInfoToCSVreport.py
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
Change comment to reflect a shared account report
|
Update: Change comment to reflect a shared account report
|
Python
|
mit
|
PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples,PaperCutSoftware/PaperCutExamples
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1Update: Change comment to reflect a shared account report
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to the Shared account configuration report
# (account_configurations.csv). Reads the report CSV from stdin, looks up each
# shared account's "notes" property over the PaperCut XML-RPC web-services API,
# and prints the rows with the extra column inserted at index 4 to stdout.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
# NOTE(review): create_default_context(Purpose.CLIENT_AUTH) builds a context
# that does not verify the server certificate — presumably deliberate for a
# self-signed localhost install; confirm before pointing at a remote host.
proxy = ServerProxy(host, verbose=False,
        context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
    if line_count == 1: #Header row of the report: insert the new column's title
        row.insert(4,"Notes data")
    elif line_count > 2: #Data rows start after line 2; rows 0 and 2 pass through unchanged
        row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
    # NOTE(review): ", ".join emits a space after each delimiter and no quoting,
    # so the output is not strictly valid CSV — confirm downstream consumers.
    print(", ".join(row))
    line_count += 1
|
<commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1<commit_msg>Update: Change comment to reflect a shared account report<commit_after>
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Appends each shared account's "notes" property as a fifth column to the
# Shared account configuration report (account_configurations.csv) piped in
# on stdin, writing the augmented rows to stdout.
host="https://localhost:9192/rpc/api/xmlrpc" # Non-localhost callers must be whitelisted in PaperCut
auth="token" # Must match advanced config property "auth.webservices.auth-token"; should be random
# XML-RPC client for the PaperCut web-services API.
proxy = ServerProxy(host, verbose=False,
                    context=create_default_context(Purpose.CLIENT_AUTH))
for line_number, record in enumerate(reader(stdin, delimiter=',')):
    if line_number == 1:
        # Header row: label the new column.
        record.insert(4, "Notes data")
    elif line_number > 2:
        # Data row: fetch the note for this (parent\child) shared account.
        record.insert(4, proxy.api.getSharedAccountProperty(auth, record[0] + "\\" + record[2], "notes"))
    print(", ".join(record))
|
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
    line_count += 1
Update: Change comment to reflect a shared account report
#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to the Shared account configuration report
# (account_configurations.csv): reads the report from stdin, appends a notes
# column fetched over the PaperCut XML-RPC web-services API, and prints the
# result to stdout.
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
        context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance; NOTE(review): CLIENT_AUTH context skips server cert verification — presumably OK for self-signed localhost, confirm for remote hosts
# TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
    if line_count == 1: #Header row: insert the new column's title
        row.insert(4,"Notes data")
    elif line_count > 2: #Data rows start after line 2; rows 0 and 2 pass through unchanged
        row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
    print(", ".join(row))
    line_count += 1
|
<commit_before>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to add user account notes to account_configurations.csv
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1<commit_msg>Update: Change comment to reflect a shared account report<commit_after>#!/usr/bin/env python3
from csv import reader
from sys import stdin
from xmlrpc.client import ServerProxy
from ssl import create_default_context, Purpose
# Script to user account notes to the Shared account configuration report(account_configurations.csv)
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then this address will need to be whitelisted in PaperCut
auth="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))#Create new ServerProxy Instance
# #TODO open and manipulate CSV
csv_reader = reader(stdin, delimiter=',') #Read in standard data
line_count = 0
for row in csv_reader:
if line_count == 1: #Header row
row.insert(4,"Notes data")
elif line_count > 2:
row.insert(4,proxy.api.getSharedAccountProperty(auth, row[0] + "\\" + row[2], "notes")) #Add Note data for shared account(Parent or child)
print(", ".join(row))
line_count += 1
|
023568228dc2ffcf772edb4d5335c0c755a7e37c
|
revel/setup.py
|
revel/setup.py
|
import subprocess
import sys
import os
import setup_util
import time
# TechEmpower FrameworkBenchmarks start/stop hooks for the Revel (Go) test.
def start(args):
    # Point app.conf's MySQL DSN at the benchmark database host, fetch and
    # build the revel command-line tool, then launch the benchmark app in
    # prod mode as a background process. Always returns 0 (success).
    setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
    subprocess.call("go get github.com/robfig/revel/cmd", shell=True, cwd="revel")
    subprocess.call("go build -o bin/revel github.com/robfig/revel/cmd", shell=True, cwd="revel")
    subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
    return 0
def stop():
    # Scan `ps aux` output and SIGKILL any process whose command line
    # mentions 'revel' (excluding the 'run-tests' harness itself). Returns 0.
    p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    out, err = p.communicate()
    for line in out.splitlines():
        # NOTE(review): on Python 3 `out` is bytes, so `'revel' in line` would
        # raise TypeError — this hook presumably targets Python 2; confirm.
        if 'revel' in line and 'run-tests' not in line:
            pid = int(line.split(None, 2)[1])
            os.kill(pid, 9)
    return 0
|
import subprocess
import sys
import os
import setup_util
import time
# Start/stop hooks for the Revel (Go) framework benchmark.
def start(args):
    # Rewrite the MySQL DSN in app.conf for the benchmark database host,
    # build the revel command-line tool, and launch the app in prod mode.
    # Always reports success (0).
    setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
    for build_step in ("go get -u github.com/robfig/revel/revel",
                       "go build -o bin/revel github.com/robfig/revel/revel"):
        subprocess.call(build_step, shell=True, cwd="revel")
    subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
    return 0
def stop():
    # SIGKILL every running 'revel' process except the test harness itself.
    # Always reports success (0).
    listing, _ = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE).communicate()
    for entry in listing.splitlines():
        if 'revel' in entry and 'run-tests' not in entry:
            os.kill(int(entry.split(None, 2)[1]), 9)
    return 0
|
Update start process to reflect new path for /cmd
|
Update start process to reflect new path for /cmd
|
Python
|
bsd-3-clause
|
raziel057/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jett
y-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,grob/FrameworkBenchmarks,zapov/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,joshk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Verber/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,torhve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,methane/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,grob/FrameworkBenchmarks,Verber/FrameworkBenchmarks,denkab/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,waiteb3/FrameworkBenc
hmarks,herloct/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,khellang/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,doom369/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zloster/FrameworkBenchmarks,valyala/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,leafo/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,dmacd/FB-try1,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,donov
anmuller/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,grob/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,actframework/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sgml/FrameworkBenchmarks,testn/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,dmacd/FB-try1,victorbriz/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jamming/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/
FrameworkBenchmarks,jamming/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,grob/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,doom369/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,valyala/FrameworkBenchmarks,valyala/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,methane/FrameworkBenchmarks,dmacd/FB-try1,jeevatkm/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,dmacd/FB-try1,dmacd/FB-try1,jebbstewart/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zane-techempower/Framework
Benchmarks,yunspace/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,grob/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,denkab/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,leafo/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sxend/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/Fra
meworkBenchmarks,Verber/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,testn/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,valyala/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,testn/FrameworkBenchmarks,yunspace/FrameworkBench
marks,thousandsofthem/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,dmacd/FB-try1,joshk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,dmacd/FB-try1,kbrock/FrameworkBenchmarks,khellang/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Eyepea/Framewo
rkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,torhve/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,grob/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jamming/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,dmacd/FB-try1,seem-sky/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,denkab/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kbrock/FrameworkBenc
hmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,herloct/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,methane/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,khellang/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zloster/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zhuochenKI
DD/FrameworkBenchmarks,sxend/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,denkab/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,leafo/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,actframework/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,torhve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,grob/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/Fra
meworkBenchmarks,herloct/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,leafo/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,denkab/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,methane/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,valyala/FrameworkBenchmarks,testn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,doom369/FrameworkBenchmarks,methane/FrameworkBenchmarks,grob/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,grob/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zapov/FrameworkBe
nchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,actframework/FrameworkBenchmarks,doom369/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,testn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zapov/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,dmacd/FB-try1,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,doom369/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,denkab/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,methane/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Dith3r/Fr
ameworkBenchmarks,jamming/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,herloct/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sgml/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,zapov/FrameworkBenchmarks,torhve/FrameworkBenchmarks,s-ludwig/Framew
orkBenchmarks,Synchro/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zloster/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,dmacd/FB-try1,zane-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,dmacd/FB-try1,ashawnbandy-te-tfb/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,torhve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zapov/FrameworkBenchmarks,wait
eb3/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,grob/FrameworkBenchmarks,doom369/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,torhve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,herloct/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,leafo/FrameworkBenchmarks,dmacd/FB-try1,Rayne/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,khellang/FrameworkBenchmarks,valyala/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,denkab/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nbrady-techempo
wer/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,testn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,joshk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sxend/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jamming/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks
|
import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets and builds
  the Revel command-line tool from github.com/robfig/revel/cmd, then starts
  the app in prod mode as a background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get github.com/robfig/revel/cmd", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/cmd", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
Update start process to reflect new path for /cmd
|
import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets (with -u)
  and builds the Revel command-line tool from its new path
  github.com/robfig/revel/revel, then starts the app in prod mode as a
  background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get -u github.com/robfig/revel/revel", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/revel", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
|
<commit_before>import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets and builds
  the Revel command-line tool from github.com/robfig/revel/cmd, then starts
  the app in prod mode as a background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get github.com/robfig/revel/cmd", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/cmd", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
<commit_msg>Update start process to reflect new path for /cmd<commit_after>
|
import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets (with -u)
  and builds the Revel command-line tool from its new path
  github.com/robfig/revel/revel, then starts the app in prod mode as a
  background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get -u github.com/robfig/revel/revel", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/revel", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
|
import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets and builds
  the Revel command-line tool from github.com/robfig/revel/cmd, then starts
  the app in prod mode as a background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get github.com/robfig/revel/cmd", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/cmd", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
Update start process to reflect new path for /cmdimport subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets (with -u)
  and builds the Revel command-line tool from its new path
  github.com/robfig/revel/revel, then starts the app in prod mode as a
  background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get -u github.com/robfig/revel/revel", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/revel", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
|
<commit_before>import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets and builds
  the Revel command-line tool from github.com/robfig/revel/cmd, then starts
  the app in prod mode as a background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get github.com/robfig/revel/cmd", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/cmd", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
<commit_msg>Update start process to reflect new path for /cmd<commit_after>import subprocess
import sys
import os
import setup_util
import time
def start(args):
  """Prepare and launch the Revel benchmark server.

  Points the app.conf MySQL DSN at args.database_host, go-gets (with -u)
  and builds the Revel command-line tool from its new path
  github.com/robfig/revel/revel, then starts the app in prod mode as a
  background process.  Always returns 0.
  """
  setup_util.replace_text("revel/src/benchmark/conf/app.conf", "tcp\(.*:3306\)", "tcp(" + args.database_host + ":3306)")
  subprocess.call("go get -u github.com/robfig/revel/revel", shell=True, cwd="revel")
  subprocess.call("go build -o bin/revel github.com/robfig/revel/revel", shell=True, cwd="revel")
  # Popen (not call): the server keeps running after start() returns.
  subprocess.Popen("bin/revel run benchmark prod".rsplit(" "), cwd="revel")
  return 0
def stop():
  """Kill every running Revel process.

  Scans `ps aux` output and SIGKILLs (signal 9) any process whose line
  mentions 'revel' but not 'run-tests' (so the test harness itself
  survives).  Always returns 0.
  NOTE(review): on Python 3 `out` is bytes and `'revel' in line` would
  raise TypeError -- this script presumably runs under Python 2; confirm.
  """
  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'revel' in line and 'run-tests' not in line:
      # Second whitespace-separated field of `ps aux` output is the PID.
      pid = int(line.split(None, 2)[1])
      os.kill(pid, 9)
  return 0
|
1cfd8618931da76fc83745a45206df08f058c453
|
pog_absolute_pointing.py
|
pog_absolute_pointing.py
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
# Python 2 print statement (this version predates the py3 conversion).
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
print("99th percentile radius of 6-month data is {} arcsec".format(dr_99_arcsec))
|
Use print as a function and tweak output text
|
Use print as a function and tweak output text
|
Python
|
bsd-2-clause
|
sot/aimpoint_mon,sot/aimpoint_mon
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
# Python 2 print statement (this version predates the py3 conversion).
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
Use print as a function and tweak output text
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
print("99th percentile radius of 6-month data is {} arcsec".format(dr_99_arcsec))
|
<commit_before>import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
dr_99_arcsec = dr_99 * 20
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
<commit_msg>Use print as a function and tweak output text<commit_after>
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
print("99th percentile radius of 6-month data is {} arcsec".format(dr_99_arcsec))
|
# Compute the 99th-percentile absolute pointing radius from the last six
# months of Chandra aspect-solution (asol) data and print it in arcsec.
import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
# Factor 20 converts mm to arcsec -- presumably the focal-plane plate
# scale (~20 arcsec/mm); TODO confirm against mission documentation.
dr_99_arcsec = dr_99 * 20
# Python 2 print statement (this version predates the py3 conversion).
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
Use print as a function and tweak output textimport numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
dr_99_arcsec = dr_99 * 20
print("99th percentile radius of 6-month data is {} arcsec".format(dr_99_arcsec))
|
<commit_before>import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
dr_99_arcsec = dr_99 * 20
print "99th percentile radius of 6m data is {} arcsec".format(dr_99_arcsec)
<commit_msg>Use print as a function and tweak output text<commit_after>import numpy as np
from Chandra.Time import DateTime
import plot_aimpoint
# Get 99th percential absolute pointing radius
plot_aimpoint.opt = plot_aimpoint.get_opt()
asols = plot_aimpoint.get_asol()
# Last six months of data
asols = asols[asols['time'] > DateTime(-183).secs]
# center of box of range of data
mid_dy = (np.max(asols['dy']) + np.min(asols['dy'])) / 2.
mid_dz = (np.max(asols['dz']) + np.min(asols['dz'])) / 2.
# radius of each delta in mm (asol dy dz in mm)
dr = np.sqrt((asols['dy'] - mid_dy) ** 2 + (asols['dz'] - mid_dz) ** 2)
dr_99 = np.percentile(dr, 99)
dr_99_arcsec = dr_99 * 20
print("99th percentile radius of 6-month data is {} arcsec".format(dr_99_arcsec))
|
999ff373d40dd98f3ffccb2478ac6d464e3332e3
|
flask_gzip.py
|
flask_gzip.py
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        if not accept_encoding:
            return response
        # NOTE(review): naive split -- a header like "deflate, gzip" yields
        # ' gzip' (leading space), which never equals 'gzip', so such
        # clients silently get no compression.
        encodings = accept_encoding.split(',')
        if 'gzip' not in encodings:
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < 500 or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses.

    Registers an after_request hook on *app*.  A response is compressed
    only when the client advertises gzip support, the status code is 2xx,
    the body is at least *minimum_size* bytes, and no Content-Encoding is
    already set.
    """
    def __init__(self, app, compress_level=6, minimum_size=500):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.minimum_size = minimum_size  # skip bodies smaller than this
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        # Substring match handles "deflate, gzip" and case variations.
        if 'gzip' not in accept_encoding.lower():
            return response
        # BUG FIX: the original test `200 > response.status_code >= 300` is
        # always False (no status is both < 200 and >= 300), so non-2xx
        # responses were compressed too.  Skip anything outside 2xx.
        if (not (200 <= response.status_code < 300)) or len(response.data) < self.minimum_size or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        # Body changed, so Content-Length must be recomputed.
        response.headers['Content-Length'] = len(response.data)
        return response
|
Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
|
Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
Add minimum_size attribute.
|
Python
|
mit
|
libwilliam/flask-compress,libwilliam/flask-compress,saymedia/flask-compress,libwilliam/flask-compress,saymedia/flask-compress,wichitacode/flask-compress,wichitacode/flask-compress
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        if not accept_encoding:
            return response
        # NOTE(review): naive split -- a header like "deflate, gzip" yields
        # ' gzip' (leading space), which never equals 'gzip', so such
        # clients silently get no compression.
        encodings = accept_encoding.split(',')
        if 'gzip' not in encodings:
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < 500 or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
Add minimum_size attribute.
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6, minimum_size=500):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.minimum_size = minimum_size  # skip bodies smaller than this
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        # Substring match handles "deflate, gzip" and case variations.
        if 'gzip' not in accept_encoding.lower():
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < self.minimum_size or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
|
<commit_before>import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        if not accept_encoding:
            return response
        # NOTE(review): naive split -- a header like "deflate, gzip" yields
        # ' gzip' (leading space), which never equals 'gzip', so such
        # clients silently get no compression.
        encodings = accept_encoding.split(',')
        if 'gzip' not in encodings:
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < 500 or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
<commit_msg>Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
Add minimum_size attribute.<commit_after>
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6, minimum_size=500):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.minimum_size = minimum_size  # skip bodies smaller than this
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        # Substring match handles "deflate, gzip" and case variations.
        if 'gzip' not in accept_encoding.lower():
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < self.minimum_size or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
|
import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        if not accept_encoding:
            return response
        # NOTE(review): naive split -- a header like "deflate, gzip" yields
        # ' gzip' (leading space), which never equals 'gzip', so such
        # clients silently get no compression.
        encodings = accept_encoding.split(',')
        if 'gzip' not in encodings:
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < 500 or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
Add minimum_size attribute.import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6, minimum_size=500):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.minimum_size = minimum_size  # skip bodies smaller than this
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        # Substring match handles "deflate, gzip" and case variations.
        if 'gzip' not in accept_encoding.lower():
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < self.minimum_size or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
|
<commit_before>import gzip
import StringIO
from flask import request
class Gzip(object):
    """Flask extension that gzip-compresses eligible responses via an
    after_request hook registered on *app*.
    """
    def __init__(self, app, compress_level=6):
        self.app = app
        self.compress_level = compress_level  # gzip level 1-9
        self.app.after_request(self.after_request)
    def after_request(self, response):
        """Compress *response* in place when eligible and return it."""
        accept_encoding = request.headers.get('Accept-Encoding', '')
        if not accept_encoding:
            return response
        # NOTE(review): naive split -- a header like "deflate, gzip" yields
        # ' gzip' (leading space), which never equals 'gzip', so such
        # clients silently get no compression.
        encodings = accept_encoding.split(',')
        if 'gzip' not in encodings:
            return response
        # NOTE(review): `200 > status >= 300` is always False (no status is
        # both < 200 and >= 300), so the intended non-2xx skip never fires;
        # only the size and Content-Encoding checks are effective.
        if (200 > response.status_code >= 300) or len(response.data) < 500 or 'Content-Encoding' in response.headers:
            return response
        gzip_buffer = StringIO.StringIO()
        gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
        gzip_file.write(response.data)
        gzip_file.close()
        response.data = gzip_buffer.getvalue()
        response.headers['Content-Encoding'] = 'gzip'
        response.headers['Content-Length'] = len(response.data)
        return response
<commit_msg>Fix Accept-Encoding match (split would result in ' gzip', which doesn't match.
Add minimum_size attribute.<commit_after>import gzip
import StringIO
from flask import request
class Gzip(object):
def __init__(self, app, compress_level=6, minimum_size=500):
self.app = app
self.compress_level = compress_level
self.minimum_size = minimum_size
self.app.after_request(self.after_request)
def after_request(self, response):
accept_encoding = request.headers.get('Accept-Encoding', '')
if 'gzip' not in accept_encoding.lower():
return response
if (200 > response.status_code >= 300) or len(response.data) < self.minimum_size or 'Content-Encoding' in response.headers:
return response
gzip_buffer = StringIO.StringIO()
gzip_file = gzip.GzipFile(mode='wb', compresslevel=self.compress_level, fileobj=gzip_buffer)
gzip_file.write(response.data)
gzip_file.close()
response.data = gzip_buffer.getvalue()
response.headers['Content-Encoding'] = 'gzip'
response.headers['Content-Length'] = len(response.data)
return response
|
c878a67815ef47abdb0bf4203a23ac0ece4feda6
|
src/CameraImage.py
|
src/CameraImage.py
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB,
has_alpha=False,
bits_per_sample=8,
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
# Convert to 8-bit RGB data
if value.dtype is not N.uint8:
value = N.array(value, dtype=N.uint8)
if len(value.shape) != 3:
value = N.dstack((value, value, value))
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB, # only allowed value
has_alpha=False,
bits_per_sample=8, # only allowed value
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
Convert data to unsigned 8-bit when displaying
|
Convert data to unsigned 8-bit when displaying
|
Python
|
mit
|
ptomato/Beams
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB,
has_alpha=False,
bits_per_sample=8,
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
Convert data to unsigned 8-bit when displaying
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
# Convert to 8-bit RGB data
if value.dtype is not N.uint8:
value = N.array(value, dtype=N.uint8)
if len(value.shape) != 3:
value = N.dstack((value, value, value))
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB, # only allowed value
has_alpha=False,
bits_per_sample=8, # only allowed value
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
<commit_before>import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB,
has_alpha=False,
bits_per_sample=8,
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
<commit_msg>Convert data to unsigned 8-bit when displaying<commit_after>
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
# Convert to 8-bit RGB data
if value.dtype is not N.uint8:
value = N.array(value, dtype=N.uint8)
if len(value.shape) != 3:
value = N.dstack((value, value, value))
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB, # only allowed value
has_alpha=False,
bits_per_sample=8, # only allowed value
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB,
has_alpha=False,
bits_per_sample=8,
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
Convert data to unsigned 8-bit when displayingimport gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
# Convert to 8-bit RGB data
if value.dtype is not N.uint8:
value = N.array(value, dtype=N.uint8)
if len(value.shape) != 3:
value = N.dstack((value, value, value))
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB, # only allowed value
has_alpha=False,
bits_per_sample=8, # only allowed value
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
<commit_before>import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB,
has_alpha=False,
bits_per_sample=8,
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
<commit_msg>Convert data to unsigned 8-bit when displaying<commit_after>import gtk, gobject
import numpy as N
class CameraImage(gtk.Image):
__gproperties__ = {
'data' : (gobject.TYPE_PYOBJECT,
'Image data',
'NumPy ndarray containing the data',
gobject.PARAM_READWRITE)
}
def __init__(self):
gtk.Image.__gobject_init__(self)
self._data = N.zeros((200, 320, 3), dtype=N.uint8)
self._display_data()
def do_get_property(self, property):
if property.name == 'data':
return self._data
else:
raise AttributeError, 'unknown property %s' % property.name
def do_set_property(self, property, value):
if property.name == 'data':
# Convert to 8-bit RGB data
if value.dtype is not N.uint8:
value = N.array(value, dtype=N.uint8)
if len(value.shape) != 3:
value = N.dstack((value, value, value))
self._data = value
self._display_data()
else:
raise AttributeError, 'unknown property %s' % property.name
def _display_data(self):
# OpenCV returns the camera data transposed
pixbuf = gtk.gdk.pixbuf_new_from_data(self._data,
gtk.gdk.COLORSPACE_RGB, # only allowed value
has_alpha=False,
bits_per_sample=8, # only allowed value
width=self._data.shape[1],
height=self._data.shape[0],
rowstride=self._data.strides[0])
self.set_from_pixbuf(pixbuf)
gobject.type_register(CameraImage)
|
837f25cd2606da70130b21109e0c03d6055622cd
|
dvol_python/__init__.py
|
dvol_python/__init__.py
|
import os
from hypothesis import settings, Verbosity
settings.register_profile("ci", settings(max_examples=1000))
settings.register_profile("dev", settings(max_examples=5))
settings.register_profile("debug", settings(max_examples=10, verbosity=Verbosity.verbose))
settings.load_profile(os.environ.get(u'HYPOTHESIS_PROFILE', 'dev'))
|
Address review feedback: set hypothesis settings depending on env. Default to dev.
|
Address review feedback: set hypothesis settings depending on env. Default to dev.
|
Python
|
apache-2.0
|
ClusterHQ/dvol,ClusterHQ/dvol,ClusterHQ/dvol
|
Address review feedback: set hypothesis settings depending on env. Default to dev.
|
import os
from hypothesis import settings, Verbosity
settings.register_profile("ci", settings(max_examples=1000))
settings.register_profile("dev", settings(max_examples=5))
settings.register_profile("debug", settings(max_examples=10, verbosity=Verbosity.verbose))
settings.load_profile(os.environ.get(u'HYPOTHESIS_PROFILE', 'dev'))
|
<commit_before><commit_msg>Address review feedback: set hypothesis settings depending on env. Default to dev.<commit_after>
|
import os
from hypothesis import settings, Verbosity
settings.register_profile("ci", settings(max_examples=1000))
settings.register_profile("dev", settings(max_examples=5))
settings.register_profile("debug", settings(max_examples=10, verbosity=Verbosity.verbose))
settings.load_profile(os.environ.get(u'HYPOTHESIS_PROFILE', 'dev'))
|
Address review feedback: set hypothesis settings depending on env. Default to dev.import os
from hypothesis import settings, Verbosity
settings.register_profile("ci", settings(max_examples=1000))
settings.register_profile("dev", settings(max_examples=5))
settings.register_profile("debug", settings(max_examples=10, verbosity=Verbosity.verbose))
settings.load_profile(os.environ.get(u'HYPOTHESIS_PROFILE', 'dev'))
|
<commit_before><commit_msg>Address review feedback: set hypothesis settings depending on env. Default to dev.<commit_after>import os
from hypothesis import settings, Verbosity
settings.register_profile("ci", settings(max_examples=1000))
settings.register_profile("dev", settings(max_examples=5))
settings.register_profile("debug", settings(max_examples=10, verbosity=Verbosity.verbose))
settings.load_profile(os.environ.get(u'HYPOTHESIS_PROFILE', 'dev'))
|
|
4a65dacb992ef48dbbaf9ca168f0b4e5567abe90
|
falmer/content/models/selection_grid.py
|
falmer/content/models/selection_grid.py
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content import components
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
components.text.to_pair(),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
Add text to selection grid
|
Add text to selection grid
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
Add text to selection grid
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content import components
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
components.text.to_pair(),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
<commit_before>from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
<commit_msg>Add text to selection grid<commit_after>
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content import components
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
components.text.to_pair(),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
Add text to selection gridfrom wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content import components
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
components.text.to_pair(),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
<commit_before>from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
<commit_msg>Add text to selection grid<commit_after>from wagtail.core import blocks
from wagtail.core.blocks import RichTextBlock
from wagtail.core.fields import StreamField
from wagtail.admin.edit_handlers import TabbedInterface, StreamFieldPanel, ObjectList
from falmer.content import components
from falmer.content.blocks import HeroImageBlock, FalmerImageChooserBlock
from falmer.content.models.core import Page
class GridItem(blocks.StructBlock):
title = blocks.CharBlock(required=True)
link = blocks.URLBlock()
image = FalmerImageChooserBlock()
description = RichTextBlock(required=False)
class Meta:
icon = 'item'
class SelectionGridPage(Page):
body = StreamField([
('heading_hero', HeroImageBlock()),
components.text.to_pair(),
('selection_grid', blocks.ListBlock(GridItem)),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body'),
]
edit_handler = TabbedInterface([
ObjectList(content_panels, heading='Content'),
ObjectList(Page.promote_panels, heading='Promote'),
ObjectList(Page.settings_panels, heading='Settings', classname="settings"),
])
type_fields = (
'body',
)
|
ecd0c00766304f1e5b12e6067a846033a4ee36d5
|
txlege84/topics/admin.py
|
txlege84/topics/admin.py
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
admin.site.register(Topic)
admin.site.register(Issue)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Topic)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
Move Issue ModelAdmin to new register syntax
|
Move Issue ModelAdmin to new register syntax
|
Python
|
mit
|
texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
admin.site.register(Topic)
admin.site.register(Issue)
admin.site.register(Stream)
admin.site.register(StoryPointer)
Move Issue ModelAdmin to new register syntax
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Topic)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
<commit_before>from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
admin.site.register(Topic)
admin.site.register(Issue)
admin.site.register(Stream)
admin.site.register(StoryPointer)
<commit_msg>Move Issue ModelAdmin to new register syntax<commit_after>
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Topic)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
admin.site.register(Topic)
admin.site.register(Issue)
admin.site.register(Stream)
admin.site.register(StoryPointer)
Move Issue ModelAdmin to new register syntaxfrom django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Topic)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
<commit_before>from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
admin.site.register(Topic)
admin.site.register(Issue)
admin.site.register(Stream)
admin.site.register(StoryPointer)
<commit_msg>Move Issue ModelAdmin to new register syntax<commit_after>from django.contrib import admin
from topics.models import Issue, StoryPointer, Stream, Topic
@admin.register(Issue)
class IssueAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
admin.site.register(Topic)
admin.site.register(Stream)
admin.site.register(StoryPointer)
|
d76cbdd768964a2583cf28ab9efaf46964c815ae
|
swf/core.py
|
swf/core.py
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if AWS_CREDENTIALS.get(credkey):
setattr(self, credkey, AWS_CREDENTIALS[credkey])
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
self.aws_access_key_id,
self.aws_secret_access_key
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
#'aws_access_key_id': AWS_ACCESS_KEY_ID,
#'aws_secret_access_key': AWS_SECRET_ACCESS_KEY,
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
AWS_CREDENTIALS['aws_access_key_id'],
AWS_CREDENTIALS['aws_secret_access_key'],
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
Update ConnectedSWFObject: raise KeyError if credentials are not set
|
Update ConnectedSWFObject: raise KeyError if credentials are not set
|
Python
|
mit
|
botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if AWS_CREDENTIALS.get(credkey):
setattr(self, credkey, AWS_CREDENTIALS[credkey])
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
self.aws_access_key_id,
self.aws_secret_access_key
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
Update ConnectedSWFObject: raise KeyError if credentials are not set
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
#'aws_access_key_id': AWS_ACCESS_KEY_ID,
#'aws_secret_access_key': AWS_SECRET_ACCESS_KEY,
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
AWS_CREDENTIALS['aws_access_key_id'],
AWS_CREDENTIALS['aws_secret_access_key'],
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
<commit_before># -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if AWS_CREDENTIALS.get(credkey):
setattr(self, credkey, AWS_CREDENTIALS[credkey])
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
self.aws_access_key_id,
self.aws_secret_access_key
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
<commit_msg>Update ConnectedSWFObject: raise KeyError if credentials are not set<commit_after>
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
#'aws_access_key_id': AWS_ACCESS_KEY_ID,
#'aws_secret_access_key': AWS_SECRET_ACCESS_KEY,
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
AWS_CREDENTIALS['aws_access_key_id'],
AWS_CREDENTIALS['aws_secret_access_key'],
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if AWS_CREDENTIALS.get(credkey):
setattr(self, credkey, AWS_CREDENTIALS[credkey])
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
self.aws_access_key_id,
self.aws_secret_access_key
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
Update ConnectedSWFObject: raise KeyError if credentials are not set# -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
#'aws_access_key_id': AWS_ACCESS_KEY_ID,
#'aws_secret_access_key': AWS_SECRET_ACCESS_KEY,
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
AWS_CREDENTIALS['aws_access_key_id'],
AWS_CREDENTIALS['aws_secret_access_key'],
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
<commit_before># -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if AWS_CREDENTIALS.get(credkey):
setattr(self, credkey, AWS_CREDENTIALS[credkey])
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
self.aws_access_key_id,
self.aws_secret_access_key
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
<commit_msg>Update ConnectedSWFObject: raise KeyError if credentials are not set<commit_after># -*- coding:utf-8 -*-
from boto.swf.layer1 import Layer1
AWS_CREDENTIALS = {
#'aws_access_key_id': AWS_ACCESS_KEY_ID,
#'aws_secret_access_key': AWS_SECRET_ACCESS_KEY,
}
def set_aws_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
AWS_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class ConnectedSWFObject(object):
"""Authenticated object interface
Once inherited, implements the AWS authentication
into the child, adding a `connection` property.
"""
def __init__(self, *args, **kwargs):
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self.connection = Layer1(
AWS_CREDENTIALS['aws_access_key_id'],
AWS_CREDENTIALS['aws_secret_access_key'],
)
def exists(self):
"""Checks if the connected swf object exists amazon-side"""
raise NotImplemented
def save(self):
"""Creates the connected swf object amazon side"""
raise NotImplemented
def deprecate(self):
"""Deprecates the connected swf object amazon side"""
raise NotImplemented
|
abb1d2db9052391c78fb09952b58a5331046aae5
|
pylinks/links/tests.py
|
pylinks/links/tests.py
|
from django.test import TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
|
from django.test import Client, TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
def test_increment_visits(self):
self.link.save()
client = Client()
response = client.get('/links/go/%d/' % self.link.id)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], self.link.url)
self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
|
Add test for link redirect
|
Add test for link redirect
|
Python
|
mit
|
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
|
from django.test import TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
Add test for link redirect
|
from django.test import Client, TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
def test_increment_visits(self):
self.link.save()
client = Client()
response = client.get('/links/go/%d/' % self.link.id)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], self.link.url)
self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
|
<commit_before>from django.test import TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
<commit_msg>Add test for link redirect<commit_after>
|
from django.test import Client, TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
def test_increment_visits(self):
self.link.save()
client = Client()
response = client.get('/links/go/%d/' % self.link.id)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], self.link.url)
self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
|
from django.test import TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
Add test for link redirectfrom django.test import Client, TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
def test_increment_visits(self):
self.link.save()
client = Client()
response = client.get('/links/go/%d/' % self.link.id)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], self.link.url)
self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
|
<commit_before>from django.test import TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
<commit_msg>Add test for link redirect<commit_after>from django.test import Client, TestCase
from .models import Category, Link
class CategoryModelTests(TestCase):
def test_category_sort(self):
Category(title='Test 2', slug='test2').save()
Category(title='Test 1', slug='test1').save()
self.assertEqual(['Test 1', 'Test 2'], map(str, Category.objects.all()))
class LinkModelTests(TestCase):
def setUp(self):
self.url = 'https://github.com/'
self.link = Link(title='GitHub', url=self.url)
def test_track_link(self):
self.assertEqual(self.link.get_absolute_url(), self.url)
self.link.save()
self.assertEqual(self.link.visits, 0)
self.assertEqual(self.link.get_absolute_url(), '/links/go/%d/' % self.link.id)
def test_link_title(self):
self.assertEqual(str(self.link), 'GitHub')
def test_increment_visits(self):
self.link.save()
client = Client()
response = client.get('/links/go/%d/' % self.link.id)
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], self.link.url)
self.assertEqual(Link.objects.get(pk=self.link.id).visits, 1)
|
045574a936df26798962f230568de33458495c09
|
spacy/about.py
|
spacy/about.py
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.0a0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
Update package name and increment version
|
Update package name and increment version
|
Python
|
mit
|
aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.0a0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
Update package name and increment version
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.0a0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
<commit_msg>Update package name and increment version<commit_after>
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.0a0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
Update package name and increment version# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
<commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy'
__version__ = '2.0.0a0'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
<commit_msg>Update package name and increment version<commit_after># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__title__ = 'spacy-nightly'
__version__ = '2.0.0a1'
__summary__ = 'Industrial-strength Natural Language Processing (NLP) with Python and Cython'
__uri__ = 'https://spacy.io'
__author__ = 'Explosion AI'
__email__ = 'contact@explosion.ai'
__license__ = 'MIT'
__docs_models__ = 'https://spacy.io/docs/usage/models'
__download_url__ = 'https://github.com/explosion/spacy-models/releases/download'
__compatibility__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json'
__shortcuts__ = 'https://raw.githubusercontent.com/explosion/spacy-models/master/shortcuts.json'
__model_files__ = 'https://raw.githubusercontent.com/explosion/spacy-dev-resources/develop/templates/model/'
|
4f1bbe6435f2c899915ab72d990a649d4e494553
|
grum/views.py
|
grum/views.py
|
from grum import app, db
from grum.models import User
from flask import render_template, request
@app.route("/")
def main():
# # Login verification code
# username = request.form('username')
# password = request.form('password')
#
# user = User.query.filter_by(username=username).first_or_404()
# if user.validate_password(password):
# # Logged in
# # Not Logged In
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form('username')
password = request.form('password')
confirm_password = request.form('confirm')
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
from grum import app, db
from grum.models import User
from flask import render_template, request, redirect
@app.route("/", methods=['GET', 'POST'])
def main():
if request.method == "POST":
# Login verification code
username = request.form['username']
password = request.form['password']
user = User.query.filter_by(username=username).first_or_404()
if user.validate_password(password):
return redirect("/mail")
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form['username']
password = request.form['password']
confirm_password = request.form['confirm']
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return redirect("/mail")
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
Fix register and login y0
|
Fix register and login y0
|
Python
|
mit
|
Grum-Hackdee/grum-web,Grum-Hackdee/grum-web,Grum-Hackdee/grum-web,Grum-Hackdee/grum-web
|
from grum import app, db
from grum.models import User
from flask import render_template, request
@app.route("/")
def main():
# # Login verification code
# username = request.form('username')
# password = request.form('password')
#
# user = User.query.filter_by(username=username).first_or_404()
# if user.validate_password(password):
# # Logged in
# # Not Logged In
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form('username')
password = request.form('password')
confirm_password = request.form('confirm')
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
Fix register and login y0
|
from grum import app, db
from grum.models import User
from flask import render_template, request, redirect
@app.route("/", methods=['GET', 'POST'])
def main():
if request.method == "POST":
# Login verification code
username = request.form['username']
password = request.form['password']
user = User.query.filter_by(username=username).first_or_404()
if user.validate_password(password):
return redirect("/mail")
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form['username']
password = request.form['password']
confirm_password = request.form['confirm']
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return redirect("/mail")
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
<commit_before>from grum import app, db
from grum.models import User
from flask import render_template, request
@app.route("/")
def main():
# # Login verification code
# username = request.form('username')
# password = request.form('password')
#
# user = User.query.filter_by(username=username).first_or_404()
# if user.validate_password(password):
# # Logged in
# # Not Logged In
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form('username')
password = request.form('password')
confirm_password = request.form('confirm')
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
<commit_msg>Fix register and login y0<commit_after>
|
from grum import app, db
from grum.models import User
from flask import render_template, request, redirect
@app.route("/", methods=['GET', 'POST'])
def main():
if request.method == "POST":
# Login verification code
username = request.form['username']
password = request.form['password']
user = User.query.filter_by(username=username).first_or_404()
if user.validate_password(password):
return redirect("/mail")
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form['username']
password = request.form['password']
confirm_password = request.form['confirm']
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return redirect("/mail")
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
from grum import app, db
from grum.models import User
from flask import render_template, request
@app.route("/")
def main():
# # Login verification code
# username = request.form('username')
# password = request.form('password')
#
# user = User.query.filter_by(username=username).first_or_404()
# if user.validate_password(password):
# # Logged in
# # Not Logged In
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form('username')
password = request.form('password')
confirm_password = request.form('confirm')
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
Fix register and login y0from grum import app, db
from grum.models import User
from flask import render_template, request, redirect
@app.route("/", methods=['GET', 'POST'])
def main():
if request.method == "POST":
# Login verification code
username = request.form['username']
password = request.form['password']
user = User.query.filter_by(username=username).first_or_404()
if user.validate_password(password):
return redirect("/mail")
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form['username']
password = request.form['password']
confirm_password = request.form['confirm']
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return redirect("/mail")
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
<commit_before>from grum import app, db
from grum.models import User
from flask import render_template, request
@app.route("/")
def main():
# # Login verification code
# username = request.form('username')
# password = request.form('password')
#
# user = User.query.filter_by(username=username).first_or_404()
# if user.validate_password(password):
# # Logged in
# # Not Logged In
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form('username')
password = request.form('password')
confirm_password = request.form('confirm')
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
<commit_msg>Fix register and login y0<commit_after>from grum import app, db
from grum.models import User
from flask import render_template, request, redirect
@app.route("/", methods=['GET', 'POST'])
def main():
if request.method == "POST":
# Login verification code
username = request.form['username']
password = request.form['password']
user = User.query.filter_by(username=username).first_or_404()
if user.validate_password(password):
return redirect("/mail")
return render_template("index.html")
@app.route("/register", methods=['GET', 'POST'])
def register():
if request.method == "POST":
username = request.form['username']
password = request.form['password']
confirm_password = request.form['confirm']
if password != confirm_password:
return redirect("/register")
new_user = User(
username=username,
password=password
)
db.session.add(new_user)
db.session.commit()
return redirect("/mail")
return render_template("register.html")
@app.route("/mail")
def mail():
return render_template('mail.html')
|
89d13cdf811d7ab499303380b549fbc4c9877076
|
confu/recipes/googlebenchmark.py
|
confu/recipes/googlebenchmark.py
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"statistics.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
Update recipe for Google Benchmark
|
Update recipe for Google Benchmark
|
Python
|
mit
|
Maratyszcza/confu,Maratyszcza/confu
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
Update recipe for Google Benchmark
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"statistics.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
<commit_before>#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
<commit_msg>Update recipe for Google Benchmark<commit_after>
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"statistics.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
Update recipe for Google Benchmark#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"statistics.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
<commit_before>#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
<commit_msg>Update recipe for Google Benchmark<commit_after>#!/usr/bin/env python
def setup(root_dir):
import confu.git
repo = confu.git.clone("https://github.com/google/benchmark.git", root_dir)
from os import path
recipes_dir = path.dirname(path.abspath(__file__))
import shutil
shutil.copyfile(
path.join(recipes_dir, "googlebenchmark.yaml"),
path.join(root_dir, "confu.yaml"))
def main(args, root_dir=None):
import confu
options = confu.standard_parser("Google micro-Benchmark framework configuration script").parse_args(args)
build = confu.Build.from_options(options, root_dir=root_dir)
build.export_cpath("include", ["benchmark/*.h"])
source_files = [
"benchmark.cc",
"benchmark_register.cc",
"colorprint.cc",
"commandlineflags.cc",
"complexity.cc",
"console_reporter.cc",
"counter.cc",
"csv_reporter.cc",
"json_reporter.cc",
"reporter.cc",
"sleep.cc",
"statistics.cc",
"string_util.cc",
"sysinfo.cc",
"timers.cc",
]
macros = [
"HAVE_POSIX_REGEX",
"NDEBUG",
]
with build.options(source_dir="src", macros=macros, extra_include_dirs="src"):
build.static_library("googlebenchmark",
[build.cxx(source) for source in source_files])
return build
|
252cfa3baa7973a923952ecb3c83cdfb9f28ab67
|
l10n_br_account/models/fiscal_document.py
|
l10n_br_account/models/fiscal_document.py
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.addons.l10n_br_fiscal.constants.fiscal import (
SITUACAO_EDOC_EM_DIGITACAO,
)
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
draft_documents = self.filtered(
lambda d: d.state == SITUACAO_EDOC_EM_DIGITACAO)
if draft_documents:
UserError(_("You cannot delete a fiscal document "
"which is not draft state."))
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
Allow delete only fiscal documents with draft state
|
[REF] Allow delete only fiscal documents with draft state
|
Python
|
agpl-3.0
|
OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
[REF] Allow delete only fiscal documents with draft state
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.addons.l10n_br_fiscal.constants.fiscal import (
SITUACAO_EDOC_EM_DIGITACAO,
)
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
draft_documents = self.filtered(
lambda d: d.state == SITUACAO_EDOC_EM_DIGITACAO)
if draft_documents:
UserError(_("You cannot delete a fiscal document "
"which is not draft state."))
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
<commit_before># Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
<commit_msg>[REF] Allow delete only fiscal documents with draft state<commit_after>
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.addons.l10n_br_fiscal.constants.fiscal import (
SITUACAO_EDOC_EM_DIGITACAO,
)
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
draft_documents = self.filtered(
lambda d: d.state == SITUACAO_EDOC_EM_DIGITACAO)
if draft_documents:
UserError(_("You cannot delete a fiscal document "
"which is not draft state."))
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
[REF] Allow delete only fiscal documents with draft state# Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.addons.l10n_br_fiscal.constants.fiscal import (
SITUACAO_EDOC_EM_DIGITACAO,
)
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
draft_documents = self.filtered(
lambda d: d.state == SITUACAO_EDOC_EM_DIGITACAO)
if draft_documents:
UserError(_("You cannot delete a fiscal document "
"which is not draft state."))
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
<commit_before># Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import api, models
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
<commit_msg>[REF] Allow delete only fiscal documents with draft state<commit_after># Copyright (C) 2009 - TODAY Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from odoo import _, api, models
from odoo.exceptions import UserError
from odoo.addons.l10n_br_fiscal.constants.fiscal import (
SITUACAO_EDOC_EM_DIGITACAO,
)
class FiscalDocument(models.Model):
_inherit = 'l10n_br_fiscal.document'
@api.multi
def unlink(self):
draft_documents = self.filtered(
lambda d: d.state == SITUACAO_EDOC_EM_DIGITACAO)
if draft_documents:
UserError(_("You cannot delete a fiscal document "
"which is not draft state."))
invoices = self.env['account.invoice'].search(
[('fiscal_document_id', 'in', self.ids)])
invoices.unlink()
return super().unlink()
|
21f6d03449217952cb981719345eccfbb1ec84b3
|
isogram/isogram.py
|
isogram/isogram.py
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
# You could also achieve this using "c.isalpha()" instead of LOWERCASE
# You would then not need to import from `string`, but it's marginally slower
|
Add note about str.isalpha() method as an alternative
|
Add note about str.isalpha() method as an alternative
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
Add note about str.isalpha() method as an alternative
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
# You could also achieve this using "c.isalpha()" instead of LOWERCASE
# You would then not need to import from `string`, but it's marginally slower
|
<commit_before>from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
<commit_msg>Add note about str.isalpha() method as an alternative<commit_after>
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
# You could also achieve this using "c.isalpha()" instead of LOWERCASE
# You would then not need to import from `string`, but it's marginally slower
|
from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
Add note about str.isalpha() method as an alternativefrom string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
# You could also achieve this using "c.isalpha()" instead of LOWERCASE
# You would then not need to import from `string`, but it's marginally slower
|
<commit_before>from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
<commit_msg>Add note about str.isalpha() method as an alternative<commit_after>from string import ascii_lowercase
LOWERCASE = set(ascii_lowercase)
def is_isogram(s):
chars = [c for c in s.lower() if c in LOWERCASE]
return len(chars) == len(set(chars))
# You could also achieve this using "c.isalpha()" instead of LOWERCASE
# You would then not need to import from `string`, but it's marginally slower
|
54a1f1774517faf377ae43f1bad4a4f5c0b0c562
|
accelerator/tests/contexts/judging_round_context.py
|
accelerator/tests/contexts/judging_round_context.py
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form)
return judging_form
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form,
mandatory=True,
element_type="feedback")
return judging_form
|
Add some values to the default judging_form_element
|
[AC-7310] Add some values to the default judging_form_element
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form)
return judging_form
[AC-7310] Add some values to the default judging_form_element
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form,
mandatory=True,
element_type="feedback")
return judging_form
|
<commit_before>from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form)
return judging_form
<commit_msg>[AC-7310] Add some values to the default judging_form_element<commit_after>
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form,
mandatory=True,
element_type="feedback")
return judging_form
|
from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form)
return judging_form
[AC-7310] Add some values to the default judging_form_elementfrom accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
def __init__(self, **kwargs):
if kwargs.get("is_active") is True:
should_be_active = True
kwargs["is_active"] = False
else:
should_be_active = False
self.judging_round = JudgingRoundFactory(**kwargs)
if should_be_active:
self.activate_judging_round()
def activate_judging_round(self):
self.judging_form = self.prepare_judging_form()
self.judging_round.judging_form = self.judging_form
self.judging_round.is_active=True
self.judging_round.save()
def prepare_judging_form(self):
judging_form = JudgingFormFactory()
JudgingFormElementFactory(
element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
form_type=judging_form,
mandatory=True,
element_type="feedback")
return judging_form
|
<commit_before>from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
    # Test helper that builds a JudgingRound fixture. An "active" round is
    # created in two steps: the factory first builds it inactive, then
    # activate_judging_round() attaches a judging form and flips the flag.

    def __init__(self, **kwargs):
        # Strip is_active so the factory creates the round inactive,
        # remembering whether to activate it afterwards.
        if kwargs.get("is_active") is True:
            should_be_active = True
            kwargs["is_active"] = False
        else:
            should_be_active = False
        self.judging_round = JudgingRoundFactory(**kwargs)
        if should_be_active:
            self.activate_judging_round()

    def activate_judging_round(self):
        # Attach a freshly built judging form, then mark the round active.
        self.judging_form = self.prepare_judging_form()
        self.judging_round.judging_form = self.judging_form
        self.judging_round.is_active=True
        self.judging_round.save()

    def prepare_judging_form(self):
        # Build a form with the single "overall recommendation" element.
        judging_form = JudgingFormFactory()
        JudgingFormElementFactory(
            element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
            form_type=judging_form)
        return judging_form
<commit_msg>[AC-7310] Add some values to the default judging_form_element<commit_after>

from accelerator.tests.factories import (
JudgingFormFactory,
JudgingFormElementFactory,
JudgingRoundFactory,
)
from accelerator_abstract.models import FORM_ELEM_OVERALL_RECOMMENDATION
class JudgingRoundContext:
    # Test helper that builds a JudgingRound fixture. An "active" round is
    # created in two steps: the factory first builds it inactive, then
    # activate_judging_round() attaches a judging form and flips the flag.

    def __init__(self, **kwargs):
        # Strip is_active so the factory creates the round inactive,
        # remembering whether to activate it afterwards.
        if kwargs.get("is_active") is True:
            should_be_active = True
            kwargs["is_active"] = False
        else:
            should_be_active = False
        self.judging_round = JudgingRoundFactory(**kwargs)
        if should_be_active:
            self.activate_judging_round()

    def activate_judging_round(self):
        # Attach a freshly built judging form, then mark the round active.
        self.judging_form = self.prepare_judging_form()
        self.judging_round.judging_form = self.judging_form
        self.judging_round.is_active=True
        self.judging_round.save()

    def prepare_judging_form(self):
        # Build a form with a single mandatory "overall recommendation"
        # feedback element.
        judging_form = JudgingFormFactory()
        JudgingFormElementFactory(
            element_name=FORM_ELEM_OVERALL_RECOMMENDATION,
            form_type=judging_form,
            mandatory=True,
            element_type="feedback")
        return judging_form
|
dfeccf96499584d6b19c0734e6041e0d4b5947a1
|
knowledge/admin.py
|
knowledge/admin.py
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
from portalpractices.models import Company, Author
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class CompanyAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Company._meta.fields]
    list_select_related = True
    raw_id_fields = ['external_id']

admin.site.register(Company, CompanyAdmin)


class AuthorAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Author._meta.fields]
    list_select_related = True
    raw_id_fields = ['company']

admin.site.register(Author, AuthorAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
    """Admin for categories; slug is pre-filled from the title."""
    list_display = [field.name for field in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}


@admin.register(Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin for questions; the user FK uses a raw-id widget."""
    list_display = [field.name for field in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']


@admin.register(Response)
class ResponseAdmin(admin.ModelAdmin):
    """Admin for responses; user and question FKs use raw-id widgets."""
    list_display = [field.name for field in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']
|
Update to remove references to Portal Practices
|
Update to remove references to Portal Practices
|
Python
|
isc
|
CantemoInternal/django-knowledge,CantemoInternal/django-knowledge,CantemoInternal/django-knowledge
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
from portalpractices.models import Company, Author
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class CompanyAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Company._meta.fields]
    list_select_related = True
    raw_id_fields = ['external_id']

admin.site.register(Company, CompanyAdmin)


class AuthorAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Author._meta.fields]
    list_select_related = True
    raw_id_fields = ['company']

admin.site.register(Author, AuthorAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
Update to remove references to Portal Practices
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
|
<commit_before>
from django.contrib import admin
from knowledge.models import Question, Response, Category
from portalpractices.models import Company, Author
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class CompanyAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Company._meta.fields]
    list_select_related = True
    raw_id_fields = ['external_id']

admin.site.register(Company, CompanyAdmin)


class AuthorAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Author._meta.fields]
    list_select_related = True
    raw_id_fields = ['company']

admin.site.register(Author, AuthorAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
<commit_msg>Update to remove references to Portal Practices<commit_after>
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
from portalpractices.models import Company, Author
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class CompanyAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Company._meta.fields]
    list_select_related = True
    raw_id_fields = ['external_id']

admin.site.register(Company, CompanyAdmin)


class AuthorAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Author._meta.fields]
    list_select_related = True
    raw_id_fields = ['company']

admin.site.register(Author, AuthorAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
Update to remove references to Portal Practices
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
|
<commit_before>
from django.contrib import admin
from knowledge.models import Question, Response, Category
from portalpractices.models import Company, Author
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class CompanyAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Company._meta.fields]
    list_select_related = True
    raw_id_fields = ['external_id']

admin.site.register(Company, CompanyAdmin)


class AuthorAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Author._meta.fields]
    list_select_related = True
    raw_id_fields = ['company']

admin.site.register(Author, AuthorAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
<commit_msg>Update to remove references to Portal Practices<commit_after>
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
    # Show every model field as a changelist column; slug auto-fills from title.
    list_display = [f.name for f in Category._meta.fields]
    prepopulated_fields = {'slug': ('title', )}

admin.site.register(Category, CategoryAdmin)


class QuestionAdmin(admin.ModelAdmin):
    # list_select_related avoids per-row FK queries in the changelist.
    list_display = [f.name for f in Question._meta.fields]
    list_select_related = True
    raw_id_fields = ['user']

admin.site.register(Question, QuestionAdmin)


class ResponseAdmin(admin.ModelAdmin):
    list_display = [f.name for f in Response._meta.fields]
    list_select_related = True
    raw_id_fields = ['user', 'question']

admin.site.register(Response, ResponseAdmin)
|
a8bbe98f07e00cc6a9e9d076c6ed39c5d3136658
|
aldryn_apphooks_config/models.py
|
aldryn_apphooks_config/models.py
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)

    def __getattr__(self, item):
        """
        This allows to access config form attribute as normal model fields
        :param item:
        :return:
        """
        try:
            return getattr(self.app_data.config, item)
        except (AttributeError, KeyError):
            # Fix: a bare ``except:`` swallowed *every* exception here,
            # including KeyboardInterrupt/SystemExit, masking real errors.
            # Only translate lookup failures into AttributeError, which is
            # the exception callers of attribute access expect.
            raise AttributeError('attribute %s not found' % item)
|
Add shortcut to get configuration data
|
Add shortcut to get configuration data
|
Python
|
bsd-3-clause
|
aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config,aldryn/aldryn-apphooks-config
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(_('type'), max_length=100)
namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'Apphook config')
verbose_name_plural = _(u'Apphook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace)

Add shortcut to get configuration data
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)

    def __getattr__(self, item):
        """
        This allows to access config form attribute as normal model fields
        :param item:
        :return:
        """
        # NOTE(review): the bare ``except`` also swallows system-exiting
        # exceptions; narrowing to AttributeError/KeyError would be safer.
        try:
            return getattr(self.app_data.config, item)
        except:
            raise AttributeError('attribute %s not found' % item)
|
<commit_before># -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(_('type'), max_length=100)
namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'Apphook config')
verbose_name_plural = _(u'Apphook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace)

<commit_msg>Add shortcut to get configuration data<commit_after>
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)

    def __getattr__(self, item):
        """
        This allows to access config form attribute as normal model fields
        :param item:
        :return:
        """
        # NOTE(review): the bare ``except`` also swallows system-exiting
        # exceptions; narrowing to AttributeError/KeyError would be safer.
        try:
            return getattr(self.app_data.config, item)
        except:
            raise AttributeError('attribute %s not found' % item)
|
# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(_('type'), max_length=100)
namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'Apphook config')
verbose_name_plural = _(u'Apphook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace)

Add shortcut to get configuration data

# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)

    def __getattr__(self, item):
        """
        This allows to access config form attribute as normal model fields
        :param item:
        :return:
        """
        # NOTE(review): the bare ``except`` also swallows system-exiting
        # exceptions; narrowing to AttributeError/KeyError would be safer.
        try:
            return getattr(self.app_data.config, item)
        except:
            raise AttributeError('attribute %s not found' % item)
|
<commit_before># -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
"""
This is the generic (abstract) model that holds the configurations for each AppHookConfig
concrete model
"""
type = models.CharField(_('type'), max_length=100)
namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
app_data = AppDataField()
cmsapp = None
class Meta:
verbose_name = _(u'Apphook config')
verbose_name_plural = _(u'Apphook configs')
unique_together = ('type', 'namespace')
abstract = True
def save(self, *args, **kwargs):
self.type = '%s.%s' % (
self.__class__.__module__, self.__class__.__name__)
super(AppHookConfig, self).save(*args, **kwargs)
def __str__(self):
if self.cmsapp:
return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
else:
return _(u'%s / %s') % (self.type, self.namespace)

<commit_msg>Add shortcut to get configuration data<commit_after>

# -*- coding: utf-8 -*-
from app_data import AppDataField
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class AppHookConfig(models.Model):
    """
    This is the generic (abstract) model that holds the configurations for each AppHookConfig
    concrete model
    """
    # Dotted path of the concrete subclass; set automatically in save().
    type = models.CharField(_('type'), max_length=100)
    namespace = models.CharField(_(u'instance namespace'), default=None, max_length=100)
    app_data = AppDataField()
    cmsapp = None  # attached by the apphook machinery; None when standalone

    class Meta:
        verbose_name = _(u'Apphook config')
        verbose_name_plural = _(u'Apphook configs')
        unique_together = ('type', 'namespace')
        abstract = True

    def save(self, *args, **kwargs):
        # Stamp the concrete subclass' dotted path before persisting.
        self.type = '%s.%s' % (
            self.__class__.__module__, self.__class__.__name__)
        super(AppHookConfig, self).save(*args, **kwargs)

    def __str__(self):
        # Prefer the attached CMS app's display name when available.
        if self.cmsapp:
            return _(u'%s / %s') % (self.cmsapp.name, self.namespace)
        else:
            return _(u'%s / %s') % (self.type, self.namespace)

    def __getattr__(self, item):
        """
        This allows to access config form attribute as normal model fields
        :param item:
        :return:
        """
        # NOTE(review): the bare ``except`` also swallows system-exiting
        # exceptions; narrowing to AttributeError/KeyError would be safer.
        try:
            return getattr(self.app_data.config, item)
        except:
            raise AttributeError('attribute %s not found' % item)
|
2f280e34762ad4910ff9e5041c2bf24f8283368c
|
src-backend/registration/tests/test_user.py
|
src-backend/registration/tests/test_user.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
    """Checks that creating a user also provisions an auth token."""

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        # Fix: the original used self.assertTrue(False) in the except branch,
        # which fails with a useless "False is not true" message. self.fail()
        # with a message says *why* the test failed; assertIsNotNone is the
        # idiomatic form of assertFalse(token is None).
        try:
            token = Token.objects.get(user=self.test_user)
        except ObjectDoesNotExist:
            self.fail('no auth token was created for the user')
        self.assertIsNotNone(token)
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.
    # Token.objects.get raises DoesNotExist (erroring the test) otherwise.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        token = Token.objects.get(user=self.test_user)
        assert_false(token is None)
|
Use nose test tools for the user test
|
Use nose test tools for the user test
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        # ObjectDoesNotExist means no token was provisioned for the user.
        try:
            token = Token.objects.get(user=self.test_user)
            self.assertFalse(token is None)
        except ObjectDoesNotExist:
            self.assertTrue(False)  # NOTE(review): self.fail() would be clearer
Use nose test tools for the user test
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.
    # Token.objects.get raises DoesNotExist (erroring the test) otherwise.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        token = Token.objects.get(user=self.test_user)
        assert_false(token is None)
|
<commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        # ObjectDoesNotExist means no token was provisioned for the user.
        try:
            token = Token.objects.get(user=self.test_user)
            self.assertFalse(token is None)
        except ObjectDoesNotExist:
            self.assertTrue(False)  # NOTE(review): self.fail() would be clearer
<commit_msg>Use nose test tools for the user test<commit_after>
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.
    # Token.objects.get raises DoesNotExist (erroring the test) otherwise.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        token = Token.objects.get(user=self.test_user)
        assert_false(token is None)
|
from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        # ObjectDoesNotExist means no token was provisioned for the user.
        try:
            token = Token.objects.get(user=self.test_user)
            self.assertFalse(token is None)
        except ObjectDoesNotExist:
            self.assertTrue(False)  # NOTE(review): self.fail() would be clearer
Use nose test tools for the user test

from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.
    # Token.objects.get raises DoesNotExist (erroring the test) otherwise.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        token = Token.objects.get(user=self.test_user)
        assert_false(token is None)
|
<commit_before>from django.test import TestCase
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.authtoken.models import Token
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        # ObjectDoesNotExist means no token was provisioned for the user.
        try:
            token = Token.objects.get(user=self.test_user)
            self.assertFalse(token is None)
        except ObjectDoesNotExist:
            self.assertTrue(False)  # NOTE(review): self.fail() would be clearer
<commit_msg>Use nose test tools for the user test<commit_after>

from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from nose.tools import assert_false
class UserTest(TestCase):
    # Verifies a freshly created user gets an auth token.
    # Token.objects.get raises DoesNotExist (erroring the test) otherwise.

    def setUp(self):
        self.test_user = User.objects.create_user('username', 'test@test.com', 'password')
        self.test_user.save()

    def test_user_has_token(self):
        token = Token.objects.get(user=self.test_user)
        assert_false(token is None)
|
40c9c762ce65e0e231a14745cdc274be6c927a74
|
byceps/services/shop/storefront/models.py
|
byceps/services/shop/storefront/models.py
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
    """A storefront.

    The entrypoint from a site to a shop.
    """

    __tablename__ = 'shop_storefronts'

    # Caller-chosen identifier (StorefrontID), not a surrogate key.
    id = db.Column(db.UnicodeText, primary_key=True)
    shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), nullable=False)
    order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
    closed = db.Column(db.Boolean, nullable=False)

    def __init__(
        self,
        storefront_id: StorefrontID,
        shop_id: ShopID,
        order_number_sequence_id: NumberSequenceID,
        closed: bool,
    ) -> None:
        self.id = storefront_id
        self.shop_id = shop_id
        self.order_number_sequence_id = order_number_sequence_id
        self.closed = closed

    def __repr__(self) -> str:
        # Only id and shop_id are included; the other columns are omitted.
        return ReprBuilder(self) \
            .add_with_lookup('id') \
            .add_with_lookup('shop_id') \
            .build()
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
    """A storefront.

    The entrypoint from a site to a shop.
    """

    __tablename__ = 'shop_storefronts'

    # Caller-chosen identifier (StorefrontID), not a surrogate key.
    id = db.Column(db.UnicodeText, primary_key=True)
    # index=True speeds up lookups of a shop's storefronts by shop_id.
    shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
    order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
    closed = db.Column(db.Boolean, nullable=False)

    def __init__(
        self,
        storefront_id: StorefrontID,
        shop_id: ShopID,
        order_number_sequence_id: NumberSequenceID,
        closed: bool,
    ) -> None:
        self.id = storefront_id
        self.shop_id = shop_id
        self.order_number_sequence_id = order_number_sequence_id
        self.closed = closed

    def __repr__(self) -> str:
        # Only id and shop_id are included; the other columns are omitted.
        return ReprBuilder(self) \
            .add_with_lookup('id') \
            .add_with_lookup('shop_id') \
            .build()
|
Add index for storefront's shop ID
|
Add index for storefront's shop ID
DDL:
CREATE INDEX ix_shop_storefronts_shop_id ON shop_storefronts (shop_id);
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
    """A storefront.

    The entrypoint from a site to a shop.
    """

    __tablename__ = 'shop_storefronts'

    # Caller-chosen identifier (StorefrontID), not a surrogate key.
    id = db.Column(db.UnicodeText, primary_key=True)
    shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), nullable=False)
    order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
    closed = db.Column(db.Boolean, nullable=False)

    def __init__(
        self,
        storefront_id: StorefrontID,
        shop_id: ShopID,
        order_number_sequence_id: NumberSequenceID,
        closed: bool,
    ) -> None:
        self.id = storefront_id
        self.shop_id = shop_id
        self.order_number_sequence_id = order_number_sequence_id
        self.closed = closed

    def __repr__(self) -> str:
        # Only id and shop_id are included; the other columns are omitted.
        return ReprBuilder(self) \
            .add_with_lookup('id') \
            .add_with_lookup('shop_id') \
            .build()
Add index for storefront's shop ID
DDL:
CREATE INDEX ix_shop_storefronts_shop_id ON shop_storefronts (shop_id);
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
|
<commit_before>"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
<commit_msg>Add index for storefront's shop ID
DDL:
CREATE INDEX ix_shop_storefronts_shop_id ON shop_storefronts (shop_id);<commit_after>
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
|
"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
Add index for storefront's shop ID
DDL:
CREATE INDEX ix_shop_storefronts_shop_id ON shop_storefronts (shop_id);"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
|
<commit_before>"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
<commit_msg>Add index for storefront's shop ID
DDL:
CREATE INDEX ix_shop_storefronts_shop_id ON shop_storefronts (shop_id);<commit_after>"""
byceps.services.shop.storefront.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ....database import db
from ....util.instances import ReprBuilder
from ..sequence.transfer.models import NumberSequenceID
from ..shop.transfer.models import ShopID
from .transfer.models import StorefrontID
class Storefront(db.Model):
"""A storefront.
The entrypoint from a site to a shop.
"""
__tablename__ = 'shop_storefronts'
id = db.Column(db.UnicodeText, primary_key=True)
shop_id = db.Column(db.UnicodeText, db.ForeignKey('shops.id'), index=True, nullable=False)
order_number_sequence_id = db.Column(db.Uuid, db.ForeignKey('shop_sequences.id'), nullable=False)
closed = db.Column(db.Boolean, nullable=False)
def __init__(
self,
storefront_id: StorefrontID,
shop_id: ShopID,
order_number_sequence_id: NumberSequenceID,
closed: bool,
) -> None:
self.id = storefront_id
self.shop_id = shop_id
self.order_number_sequence_id = order_number_sequence_id
self.closed = closed
def __repr__(self) -> str:
return ReprBuilder(self) \
.add_with_lookup('id') \
.add_with_lookup('shop_id') \
.build()
|
4e3773d96a47b88529a01fa4c4a0f25bf1b77b1c
|
lib/github_test.py
|
lib/github_test.py
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
def test_repo_from_path(self):
up = "/Users/MorehouseJ09/Desktop/github-issues.vim"
self.assertTrue(github.repo_from_path(up))
if __name__ == "__main__":
unittest.main()
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
if __name__ == "__main__":
unittest.main()
|
Remove old repo_from_path tests. This is a very hard functionality to test
|
Remove old repo_from_path tests. This is a very hard functionality to
test
|
Python
|
mit
|
jonmorehouse/vimhub
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
def test_repo_from_path(self):
up = "/Users/MorehouseJ09/Desktop/github-issues.vim"
self.assertTrue(github.repo_from_path(up))
if __name__ == "__main__":
unittest.main()
Remove old repo_from_path tests. This is a very hard functionality to
test
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
def test_repo_from_path(self):
up = "/Users/MorehouseJ09/Desktop/github-issues.vim"
self.assertTrue(github.repo_from_path(up))
if __name__ == "__main__":
unittest.main()
<commit_msg>Remove old repo_from_path tests. This is a very hard functionality to
test<commit_after>
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
if __name__ == "__main__":
unittest.main()
|
import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
def test_repo_from_path(self):
up = "/Users/MorehouseJ09/Desktop/github-issues.vim"
self.assertTrue(github.repo_from_path(up))
if __name__ == "__main__":
unittest.main()
Remove old repo_from_path tests. This is a very hard functionality to
testimport unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
def test_repo_from_path(self):
up = "/Users/MorehouseJ09/Desktop/github-issues.vim"
self.assertTrue(github.repo_from_path(up))
if __name__ == "__main__":
unittest.main()
<commit_msg>Remove old repo_from_path tests. This is a very hard functionality to
test<commit_after>import unittest
import github
class TestGithub(unittest.TestCase):
def setUp(self):
pass
def test_user(self):
u = github.user()
u1 = github.user()
self.assertTrue(u)
# make sure hash works
self.assertTrue(u is u1)
def test_has_issues(self):
self.assertTrue(github.has_issues(("jonmorehouse/vim-github")))
self.assertFalse(github.has_issues(("jonmorehouse/github-issues.vim")))
if __name__ == "__main__":
unittest.main()
|
76282391f35725ee42ac0671a9a77b68e1f34081
|
rest_tester/test_info.py
|
rest_tester/test_info.py
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data[cls.PATH_API_PARAMS] if cls.PATH_API_PARAMS in api_data.keys() else {}
timeout = api_data[cls.PATH_API_TIMEOUT] if cls.PATH_API_TIMEOUT in api_data.keys() else cls.DEFAULT_TIME_OUT
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
"""Use get for only 'params' and 'timeout' to raise KeyError if keys do not exist."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data.get(cls.PATH_API_PARAMS, {})
timeout = api_data.get(cls.PATH_API_TIMEOUT, cls.DEFAULT_TIME_OUT)
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
Use get for params and timeout.
|
Use get for params and timeout.
|
Python
|
mit
|
ridibooks/lightweight-rest-tester,ridibooks/lightweight-rest-tester
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data[cls.PATH_API_PARAMS] if cls.PATH_API_PARAMS in api_data.keys() else {}
timeout = api_data[cls.PATH_API_TIMEOUT] if cls.PATH_API_TIMEOUT in api_data.keys() else cls.DEFAULT_TIME_OUT
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
Use get for params and timeout.
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
"""Use get for only 'params' and 'timeout' to raise KeyError if keys do not exist."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data.get(cls.PATH_API_PARAMS, {})
timeout = api_data.get(cls.PATH_API_TIMEOUT, cls.DEFAULT_TIME_OUT)
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
<commit_before>class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data[cls.PATH_API_PARAMS] if cls.PATH_API_PARAMS in api_data.keys() else {}
timeout = api_data[cls.PATH_API_TIMEOUT] if cls.PATH_API_TIMEOUT in api_data.keys() else cls.DEFAULT_TIME_OUT
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
<commit_msg>Use get for params and timeout.<commit_after>
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
"""Use get for only 'params' and 'timeout' to raise KeyError if keys do not exist."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data.get(cls.PATH_API_PARAMS, {})
timeout = api_data.get(cls.PATH_API_TIMEOUT, cls.DEFAULT_TIME_OUT)
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data[cls.PATH_API_PARAMS] if cls.PATH_API_PARAMS in api_data.keys() else {}
timeout = api_data[cls.PATH_API_TIMEOUT] if cls.PATH_API_TIMEOUT in api_data.keys() else cls.DEFAULT_TIME_OUT
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
Use get for params and timeout.class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
"""Use get for only 'params' and 'timeout' to raise KeyError if keys do not exist."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data.get(cls.PATH_API_PARAMS, {})
timeout = api_data.get(cls.PATH_API_TIMEOUT, cls.DEFAULT_TIME_OUT)
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
<commit_before>class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data[cls.PATH_API_PARAMS] if cls.PATH_API_PARAMS in api_data.keys() else {}
timeout = api_data[cls.PATH_API_TIMEOUT] if cls.PATH_API_TIMEOUT in api_data.keys() else cls.DEFAULT_TIME_OUT
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
<commit_msg>Use get for params and timeout.<commit_after>class TestInfo(object):
"""Read test information from JSON data."""
PATH_API = 'api'
PATH_API_URL = 'url'
PATH_API_PARAMS = 'params'
PATH_API_TIMEOUT = 'timeout'
PATH_TESTS = 'tests'
DEFAULT_TIME_OUT = 10
@classmethod
def read(cls, json_data):
"""Read test information from JSON data."""
"""Use get for only 'params' and 'timeout' to raise KeyError if keys do not exist."""
api_data = json_data[cls.PATH_API]
url = api_data[cls.PATH_API_URL]
params = api_data.get(cls.PATH_API_PARAMS, {})
timeout = api_data.get(cls.PATH_API_TIMEOUT, cls.DEFAULT_TIME_OUT)
test_cases = json_data[cls.PATH_TESTS]
return url, params, timeout, test_cases
|
fecb9624379057a98aeaf2bb5cf42d7e526bbf0a
|
vumi/blinkenlights/heartbeat/__init__.py
|
vumi/blinkenlights/heartbeat/__init__.py
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
"""Vumi worker heartbeating."""
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
Add module docstring from vumi.blinkenlights.heartbeat.
|
Add module docstring from vumi.blinkenlights.heartbeat.
|
Python
|
bsd-3-clause
|
vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,TouK/vumi
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
Add module docstring from vumi.blinkenlights.heartbeat.
|
"""Vumi worker heartbeating."""
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
<commit_before>
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
<commit_msg>Add module docstring from vumi.blinkenlights.heartbeat.<commit_after>
|
"""Vumi worker heartbeating."""
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
Add module docstring from vumi.blinkenlights.heartbeat."""Vumi worker heartbeating."""
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
<commit_before>
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
<commit_msg>Add module docstring from vumi.blinkenlights.heartbeat.<commit_after>"""Vumi worker heartbeating."""
from vumi.blinkenlights.heartbeat.publisher import (HeartBeatMessage,
HeartBeatPublisher)
__all__ = ["HeartBeatMessage", "HeartBeatPublisher"]
|
fbf2a59d9cf25c3d3a041afa839d0d44f6f385a5
|
win_unc/internal/utils.py
|
win_unc/internal/utils.py
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
Return None explicitly instead of implicitly
|
Return None explicitly instead of implicitly
|
Python
|
mit
|
CovenantEyes/py_win_unc,nithinphilips/py_win_unc
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
Return None explicitly instead of implicitly
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
<commit_before>"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
<commit_msg>Return None explicitly instead of implicitly<commit_after>
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
Return None explicitly instead of implicitly"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
<commit_before>"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
<commit_msg>Return None explicitly instead of implicitly<commit_after>"""
Contains generic helper funcitons to aid in parsing.
"""
import itertools
def take_while(predicate, items):
return list(itertools.takewhile(predicate, items))
def drop_while(predicate, items):
return list(itertools.dropwhile(predicate, items))
def not_(func):
return lambda *args, **kwargs: not func(*args, **kwargs)
def first(predicate, iterable):
for item in iterable:
if predicate(item):
return item
return None
def rfirst(predicate, iterable):
return first(predicate, reversed(list(iterable)))
def catch(func, *args, **kwargs):
"""
Executes `func` with `args` and `kwargs` as arguments. If `func` throws an error, this function
returns the error, otherwise it returns `None`.
"""
try:
func(*args, **kwargs)
except Exception as error:
return error
return None
|
62a6b78b62631c0b1de7d0497250aa3d0310d47d
|
winthrop/common/models.py
|
winthrop/common/models.py
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
    '''Abstract model with a 'name' field; by default, name is used as
    the string display.'''
    # Display name; uniqueness is enforced at the database level.
    name = models.CharField(max_length=255, unique=True)

    class Meta:
        abstract = True  # mix-in only; creates no table of its own

    def __str__(self):
        return self.name
class Notable(models.Model):
    '''Abstract model with an optional notes text field'''
    # Free-form notes; blank=True makes the field optional in forms.
    notes = models.TextField(blank=True)

    class Meta:
        abstract = True  # mix-in only; creates no table of its own
class DateRange(models.Model):
    '''Abstract model with optional start and end years, and a
    custom dates property to display the date range nicely.'''
    start_year = models.PositiveIntegerField(null=True, blank=True)
    end_year = models.PositiveIntegerField(null=True, blank=True)

    class Meta:
        abstract = True

    @property
    def dates(self):
        '''Date or date range as a string for display'''
        # Neither year set: nothing to display.
        if not (self.start_year or self.end_year):
            return ''
        # Identical start and end: show the single year.
        # NOTE(review): this branch returns the year as-is (an int) while
        # the range branch returns a string — presumably fine for template
        # rendering, but worth confirming with callers.
        if self.start_year == self.end_year:
            return self.start_year
        # Open-ended ranges drop the missing side, e.g. "1604-" or "-1667".
        shown = (self.start_year, '-', self.end_year)
        return ''.join(str(part) for part in shown if part is not None)
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
ordering = ['name']
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
|
Add alpha ordering on Named abstract class
|
Add alpha ordering on Named abstract class
|
Python
|
apache-2.0
|
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
Add alpha ordering on Named abstract class
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
ordering = ['name']
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
|
<commit_before>from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
<commit_msg>Add alpha ordering on Named abstract class<commit_after>
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
ordering = ['name']
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
|
from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
Add alpha ordering on Named abstract classfrom django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
ordering = ['name']
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
|
<commit_before>from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
<commit_msg>Add alpha ordering on Named abstract class<commit_after>from django.db import models
# abstract models with common fields to be
# used as mix-ins
class Named(models.Model):
'''Abstract model with a 'name' field; by default, name is used as
the string display.'''
name = models.CharField(max_length=255, unique=True)
class Meta:
abstract = True
ordering = ['name']
def __str__(self):
return self.name
class Notable(models.Model):
'''Abstract model with an optional notes text field'''
notes = models.TextField(blank=True)
class Meta:
abstract = True
class DateRange(models.Model):
'''Abstract model with optional start and end years, and a
custom dates property to display the date range nicely.'''
start_year = models.PositiveIntegerField(null=True, blank=True)
end_year = models.PositiveIntegerField(null=True, blank=True)
class Meta:
abstract = True
@property
def dates(self):
'''Date or date range as a string for display'''
# if no dates are set, return an empty string
if not self.start_year and not self.end_year:
return ''
# if start and end year are the same just return one year
if self.start_year == self.end_year:
return self.start_year
date_parts = [self.start_year, '-', self.end_year]
return ''.join([str(dp) for dp in date_parts if dp is not None])
|
09506e7ae8dbc1ad06b35c075e15946dd2c6092b
|
examples/my_test_suite.py
|
examples/my_test_suite.py
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
    """Example SeleniumBase suite; tests 2 and 4 fail on purpose to
    demonstrate failure reporting."""

    def test_1(self):
        # Walk forward four comics and verify the final title.
        self.open("http://xkcd.com/1663/")
        # `range` instead of the Python-2-only `xrange`: behaviorally
        # identical for this loop and keeps the example runnable on Python 3.
        for p in range(4):
            self.click('a[rel="next"]')
        self.find_text("Algorithms", "div#ctitle", timeout=3)

    def test_2(self):
        # This test will fail
        self.open("http://xkcd.com/1675/")
        raise Exception("FAKE EXCEPTION: This test fails on purpose.")

    def test_3(self):
        # Two page loads, each verified by its comic title.
        self.open("http://xkcd.com/1406/")
        self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
        self.open("http://xkcd.com/608/")
        self.find_text("Form", "div#ctitle", timeout=3)

    def test_4(self):
        # This test will fail
        self.open("http://xkcd.com/1670/")
        self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
Update the example test suite
|
Update the example test suite
|
Python
|
mit
|
possoumous/Watchers,mdmintz/SeleniumBase,possoumous/Watchers,seleniumbase/SeleniumBase,mdmintz/seleniumspot,ktp420/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,ktp420/SeleniumBase,ktp420/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,possoumous/Watchers,possoumous/Watchers,mdmintz/SeleniumBase
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
Update the example test suite
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
<commit_before>from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
<commit_msg>Update the example test suite<commit_after>
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
Update the example test suitefrom seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
<commit_before>from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
<commit_msg>Update the example test suite<commit_after>from seleniumbase import BaseCase
class MyTestSuite(BaseCase):
def test_1(self):
self.open("http://xkcd.com/1663/")
self.find_text("Garden", "div#ctitle", timeout=3)
for p in xrange(4):
self.click('a[rel="next"]')
self.find_text("Algorithms", "div#ctitle", timeout=3)
def test_2(self):
# This test will fail
self.open("http://xkcd.com/1675/")
raise Exception("FAKE EXCEPTION: This test fails on purpose.")
def test_3(self):
self.open("http://xkcd.com/1406/")
self.find_text("Universal Converter Box", "div#ctitle", timeout=3)
self.open("http://xkcd.com/608/")
self.find_text("Form", "div#ctitle", timeout=3)
def test_4(self):
# This test will fail
self.open("http://xkcd.com/1670/")
self.find_element("FakeElement.DoesNotExist", timeout=0.5)
|
5aca109f486786266164f4ac7a10e4d76f0730e4
|
scrappyr/scraps/forms.py
|
scrappyr/scraps/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
    """Form for creating a Scrap from its raw title, rendered with
    django-crispy-forms."""

    class Meta:
        model = Scrap
        fields = ['raw_title']  # only the title is user-supplied here

    def __init__(self, *args, **kwargs):
        super(ScrapForm, self).__init__(*args, **kwargs)
        # Crispy-forms layout: narrow column with an inline submit button.
        self.helper = FormHelper()
        self.helper.form_class = 'col-md-4'
        self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
Make add-scrap title user friendly
|
Make add-scrap title user friendly
|
Python
|
mit
|
tonysyu/scrappyr-app,tonysyu/scrappyr-app,tonysyu/scrappyr-app,tonysyu/scrappyr-app
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
Make add-scrap title user friendly
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
<commit_before>from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
<commit_msg>Make add-scrap title user friendly<commit_after>
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
Make add-scrap title user friendlyfrom crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
<commit_before>from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
<commit_msg>Make add-scrap title user friendly<commit_after>from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django import forms
from .models import Scrap
class ScrapForm(forms.ModelForm):
class Meta:
model = Scrap
fields = ['raw_title']
def __init__(self, *args, **kwargs):
super(ScrapForm, self).__init__(*args, **kwargs)
self.fields['raw_title'].label = 'New scrap title'
self.helper = FormHelper()
self.helper.form_class = 'col-md-4'
self.helper.add_input(Submit('submit', 'Add Scrap', css_class='btn-sm'))
|
bdb38a935dbbe6b70b0b960ba132dc6870455ceb
|
validate.py
|
validate.py
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType
)
def main():
    """Run every database consistency check, printing one line per problem."""
    load()
    # A resource nobody can gather would make the game unwinnable.
    for resource in UnitType.resource_names():
        if not UnitType.count(getattr(UnitType, resource) >= 1):
            print(f'There is no unit that can gather {resource}.')
    # Every building type needs at least one unit able to build it.
    for building_type in BuildingType.all():
        if not BuildingBuilder.count(building_type_id=building_type.id):
            print(f'There is no way to build {building_type.name}.')
    # Every unit type needs at least one building able to recruit it.
    for unit_type in UnitType.all():
        if not BuildingRecruit.count(unit_type_id=unit_type.id):
            print(f'There is no way to recruit {unit_type.get_name()}.')
if __name__ == '__main__':
    try:
        main()
    except FileNotFoundError:
        # `load()` reads the database file; show a friendly message
        # instead of a traceback when it does not exist yet.
        print('No database file exists.')
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType, setup,
options
)
def main():
load()
setup()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
sb = options.start_building
if not sb.recruits:
print(f'Start building {sb.get_name()} cannot recruit anything.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
|
Check that start building can recruit something.
|
Check that start building can recruit something.
|
Python
|
mpl-2.0
|
chrisnorman7/pyrts,chrisnorman7/pyrts,chrisnorman7/pyrts
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType
)
def main():
load()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
Check that start building can recruit something.
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType, setup,
options
)
def main():
load()
setup()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
sb = options.start_building
if not sb.recruits:
print(f'Start building {sb.get_name()} cannot recruit anything.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
|
<commit_before>"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType
)
def main():
load()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
<commit_msg>Check that start building can recruit something.<commit_after>
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType, setup,
options
)
def main():
load()
setup()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
sb = options.start_building
if not sb.recruits:
print(f'Start building {sb.get_name()} cannot recruit anything.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
|
"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType
)
def main():
load()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
Check that start building can recruit something."""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType, setup,
options
)
def main():
load()
setup()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
sb = options.start_building
if not sb.recruits:
print(f'Start building {sb.get_name()} cannot recruit anything.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
|
<commit_before>"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType
)
def main():
load()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
<commit_msg>Check that start building can recruit something.<commit_after>"""Check for inconsistancies in the database."""
from server.db import (
BuildingBuilder, BuildingRecruit, BuildingType, load, UnitType, setup,
options
)
def main():
load()
setup()
for name in UnitType.resource_names():
if UnitType.count(getattr(UnitType, name) >= 1):
continue
else:
print(f'There is no unit that can gather {name}.')
for bt in BuildingType.all():
if not BuildingBuilder.count(building_type_id=bt.id):
print(f'There is no way to build {bt.name}.')
for ut in UnitType.all():
if not BuildingRecruit.count(unit_type_id=ut.id):
print(f'There is no way to recruit {ut.get_name()}.')
sb = options.start_building
if not sb.recruits:
print(f'Start building {sb.get_name()} cannot recruit anything.')
if __name__ == '__main__':
try:
main()
except FileNotFoundError:
print('No database file exists.')
|
15efe5ecd3f17ec05f3dc9054cd823812c4b3743
|
utils/http.py
|
utils/http.py
|
import requests
def retrieve_json(url):
r = requests.get(url)
r.raise_for_status()
return r.json()
|
import requests
DEFAULT_TIMEOUT = 10
def retrieve_json(url, timeout=DEFAULT_TIMEOUT):
r = requests.get(url, timeout=timeout)
r.raise_for_status()
return r.json()
|
Add a default timeout parameter to retrieve_json
|
Add a default timeout parameter to retrieve_json
|
Python
|
bsd-3-clause
|
tebriel/dd-agent,jyogi/purvar-agent,pmav99/praktoras,gphat/dd-agent,manolama/dd-agent,pmav99/praktoras,gphat/dd-agent,Wattpad/dd-agent,brettlangdon/dd-agent,cberry777/dd-agent,pmav99/praktoras,brettlangdon/dd-agent,tebriel/dd-agent,tebriel/dd-agent,Wattpad/dd-agent,jyogi/purvar-agent,cberry777/dd-agent,manolama/dd-agent,gphat/dd-agent,takus/dd-agent,tebriel/dd-agent,gphat/dd-agent,takus/dd-agent,cberry777/dd-agent,Wattpad/dd-agent,c960657/dd-agent,c960657/dd-agent,Wattpad/dd-agent,indeedops/dd-agent,takus/dd-agent,indeedops/dd-agent,tebriel/dd-agent,jyogi/purvar-agent,Wattpad/dd-agent,takus/dd-agent,gphat/dd-agent,cberry777/dd-agent,takus/dd-agent,brettlangdon/dd-agent,pmav99/praktoras,jyogi/purvar-agent,c960657/dd-agent,c960657/dd-agent,brettlangdon/dd-agent,indeedops/dd-agent,manolama/dd-agent,brettlangdon/dd-agent,c960657/dd-agent,manolama/dd-agent,indeedops/dd-agent,pmav99/praktoras,jyogi/purvar-agent,cberry777/dd-agent,manolama/dd-agent,indeedops/dd-agent
|
import requests
def retrieve_json(url):
r = requests.get(url)
r.raise_for_status()
return r.json()
Add a default timeout parameter to retrieve_json
|
import requests
DEFAULT_TIMEOUT = 10
def retrieve_json(url, timeout=DEFAULT_TIMEOUT):
r = requests.get(url, timeout=timeout)
r.raise_for_status()
return r.json()
|
<commit_before>import requests
def retrieve_json(url):
r = requests.get(url)
r.raise_for_status()
return r.json()
<commit_msg>Add a default timeout parameter to retrieve_json<commit_after>
|
import requests
DEFAULT_TIMEOUT = 10
def retrieve_json(url, timeout=DEFAULT_TIMEOUT):
r = requests.get(url, timeout=timeout)
r.raise_for_status()
return r.json()
|
import requests
def retrieve_json(url):
r = requests.get(url)
r.raise_for_status()
return r.json()
Add a default timeout parameter to retrieve_jsonimport requests
DEFAULT_TIMEOUT = 10
def retrieve_json(url, timeout=DEFAULT_TIMEOUT):
r = requests.get(url, timeout=timeout)
r.raise_for_status()
return r.json()
|
<commit_before>import requests
def retrieve_json(url):
r = requests.get(url)
r.raise_for_status()
return r.json()
<commit_msg>Add a default timeout parameter to retrieve_json<commit_after>import requests
DEFAULT_TIMEOUT = 10
def retrieve_json(url, timeout=DEFAULT_TIMEOUT):
r = requests.get(url, timeout=timeout)
r.raise_for_status()
return r.json()
|
fe89b50d87c37c83170de74e5f88f59d88ba2c89
|
vispy/visuals/tests/test_arrows.py
|
vispy/visuals/tests/test_arrows.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
vertices += 0.33
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
Add 0.33 to the vertices to prevent misalignment
|
Add 0.33 to the vertices to prevent misalignment
|
Python
|
bsd-3-clause
|
michaelaye/vispy,ghisvail/vispy,QuLogic/vispy,jdreaver/vispy,Eric89GXL/vispy,srinathv/vispy,dchilds7/Deysha-Star-Formation,jay3sh/vispy,sbtlaarzc/vispy,jay3sh/vispy,julienr/vispy,sbtlaarzc/vispy,michaelaye/vispy,jay3sh/vispy,bollu/vispy,drufat/vispy,RebeccaWPerry/vispy,julienr/vispy,Eric89GXL/vispy,bollu/vispy,jdreaver/vispy,bollu/vispy,srinathv/vispy,kkuunnddaannkk/vispy,dchilds7/Deysha-Star-Formation,RebeccaWPerry/vispy,ghisvail/vispy,julienr/vispy,drufat/vispy,QuLogic/vispy,jdreaver/vispy,inclement/vispy,RebeccaWPerry/vispy,QuLogic/vispy,Eric89GXL/vispy,inclement/vispy,drufat/vispy,srinathv/vispy,sbtlaarzc/vispy,ghisvail/vispy,kkuunnddaannkk/vispy,kkuunnddaannkk/vispy,dchilds7/Deysha-Star-Formation,michaelaye/vispy,inclement/vispy
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
Add 0.33 to the vertices to prevent misalignment
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
vertices += 0.33
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
<commit_msg>Add 0.33 to the vertices to prevent misalignment<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
vertices += 0.33
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
Add 0.33 to the vertices to prevent misalignment# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
vertices += 0.33
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
<commit_msg>Add 0.33 to the vertices to prevent misalignment<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from vispy.visuals.line.arrow import ARROW_TYPES
from vispy.scene import visuals, transforms
from vispy.testing import (requires_application, TestingCanvas,
run_tests_if_main, raises)
from vispy.testing.image_tester import assert_image_approved
@requires_application()
def test_line_draw():
"""Test drawing arrows without transforms"""
vertices = np.array([
[25, 25],
[25, 75],
[50, 25],
[50, 75],
[75, 25],
[75, 75]
]).astype('f32')
vertices += 0.33
arrows = np.array([
vertices[:2],
vertices[3:1:-1],
vertices[4:],
vertices[-1:-3:-1]
]).reshape((4, 4))
with TestingCanvas() as c:
for arrow_type in ARROW_TYPES:
arrow = visuals.Arrow(pos=vertices, arrow_type=arrow_type,
arrows=arrows, arrow_size=10, color='red',
connect="segments", parent=c.scene)
assert_image_approved(c.render(), 'visuals/arrow_type_%s.png' %
arrow_type)
arrow.parent = None
run_tests_if_main()
|
a75ca43b3035f3f391b39393802ea46d440b22c5
|
bookvoyage-backend/core/admin.py
|
bookvoyage-backend/core/admin.py
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register models
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
from django.contrib.auth.models import User
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
class UserResource(resources.ModelResource):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'groups', 'username')
import_id_fields = ['username']
skip_unchanged = True
class UserAdmin(ImportExportModelAdmin):
resource_class = UserResource
pass
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
Add option to bulk-add users
|
Add option to bulk-add users
Warning: excel import is shaky with importing groups; json import is recommended.
|
Python
|
mit
|
edushifts/book-voyage,edushifts/book-voyage,edushifts/book-voyage,edushifts/book-voyage
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
Add option to bulk-add users
Warning: excel import is shaky with importing groups; json import is recommended.
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register models
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
from django.contrib.auth.models import User
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
class UserResource(resources.ModelResource):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'groups', 'username')
import_id_fields = ['username']
skip_unchanged = True
class UserAdmin(ImportExportModelAdmin):
resource_class = UserResource
pass
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
<commit_before>from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
<commit_msg>Add option to bulk-add users
Warning: excel import is shaky with importing groups; json import is recommended.<commit_after>
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register models
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
from django.contrib.auth.models import User
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
class UserResource(resources.ModelResource):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'groups', 'username')
import_id_fields = ['username']
skip_unchanged = True
class UserAdmin(ImportExportModelAdmin):
resource_class = UserResource
pass
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
Add option to bulk-add users
Warning: excel import is shaky with importing groups; json import is recommended.from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register models
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
from django.contrib.auth.models import User
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
class UserResource(resources.ModelResource):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'groups', 'username')
import_id_fields = ['username']
skip_unchanged = True
class UserAdmin(ImportExportModelAdmin):
resource_class = UserResource
pass
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
<commit_before>from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register your models here.
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
<commit_msg>Add option to bulk-add users
Warning: excel import is shaky with importing groups; json import is recommended.<commit_after>from leaflet.admin import LeafletGeoAdmin
from django.contrib import admin
from import_export import resources
from import_export.admin import ImportExportModelAdmin
# Register models
from .models import Author, Book, BookInstance, BookHolding, BookOwning, BookBatch
from django.contrib.auth.models import User
class BookResource(resources.ModelResource):
class Meta:
model = Book
class BookAdmin(ImportExportModelAdmin):
resource_class = BookResource
class BookCodeResource(resources.ModelResource):
class Meta:
model = BookInstance
exclude = ('id','ownings', 'holdings', 'arrived')
import_id_fields = ['book_code']
skip_unchanged = True
class BookCodeAdmin(ImportExportModelAdmin):
resource_class = BookCodeResource
class UserResource(resources.ModelResource):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email', 'groups', 'username')
import_id_fields = ['username']
skip_unchanged = True
class UserAdmin(ImportExportModelAdmin):
resource_class = UserResource
pass
admin.site.register(Author)
admin.site.register(Book, BookAdmin)
admin.site.register(BookInstance, BookCodeAdmin)
admin.site.register(BookHolding, LeafletGeoAdmin)
admin.site.register(BookOwning, LeafletGeoAdmin)
admin.site.register(BookBatch, LeafletGeoAdmin)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
|
3e1f8a567e9d7fa7bb7ac5acf8fe336b88faeeaa
|
expressions/python/setup.py
|
expressions/python/setup.py
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
package_data={'expressions': ['month.aliases']},
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
Declare package_data to ensure month.aliases is included
|
Declare package_data to ensure month.aliases is included
|
Python
|
bsd-3-clause
|
rapidpro/flows
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
Declare package_data to ensure month.aliases is included
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
package_data={'expressions': ['month.aliases']},
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
<commit_before>from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
<commit_msg>Declare package_data to ensure month.aliases is included<commit_after>
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
package_data={'expressions': ['month.aliases']},
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
Declare package_data to ensure month.aliases is includedfrom setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
package_data={'expressions': ['month.aliases']},
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
<commit_before>from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
<commit_msg>Declare package_data to ensure month.aliases is included<commit_after>from setuptools import setup, find_packages
def _is_requirement(line):
"""Returns whether the line is a valid package requirement."""
line = line.strip()
return line and not (line.startswith("-r") or line.startswith("#"))
def _read_requirements(filename):
"""Returns a list of package requirements read from the file."""
requirements_file = open(filename).read()
return [line.strip() for line in requirements_file.splitlines()
if _is_requirement(line)]
required_packages = _read_requirements("requirements/base.txt")
test_packages = _read_requirements("requirements/tests.txt")
setup(
name='rapidpro-expressions',
version='1.0',
description='Python implementation of the RapidPro expression and templating system',
url='https://github.com/rapidpro/flows',
author='Nyaruka',
author_email='code@nyaruka.com',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
],
keywords='rapidpro templating',
packages=find_packages(),
package_data={'expressions': ['month.aliases']},
install_requires=required_packages,
test_suite='nose.collector',
tests_require=required_packages + test_packages,
)
|
a8b0a1f20264506beec9ffc1299b82277a339556
|
chipy_org/apps/profiles/views.py
|
chipy_org/apps/profiles/views.py
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
Add ordering for profiles by name
|
Add ordering for profiles by name
|
Python
|
mit
|
chicagopython/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
Add ordering for profiles by name
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
<commit_before>from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
<commit_msg>Add ordering for profiles by name<commit_after>
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
    """Public listing of member profiles.

    Only users who opted in to being shown (profile.show is True) are
    included in the queryset.
    """
    context_object_name = "profiles"
    template_name = "profiles/list.html"
    queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
    """Listing of organizer users, sorted alphabetically by display name."""
    context_object_name = "organizers"
    template_name = "profiles/organizers.html"
    # user_organizers() supplies the base queryset; ordering is applied
    # by ListView on top of it.
    queryset = UserProfile.user_organizers()
    ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
    """Edit form for the currently authenticated user's own profile."""
    form_class = ProfileForm
    template_name = "profiles/edit.html"
    success_url = "/"
    def get_object(self, queryset=None):
        """Return the profile of the requesting user, ignoring *queryset*.

        Raises UserProfile.DoesNotExist if the user has no profile row.
        """
        return UserProfile.objects.get(user=self.request.user)
|
from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
Add ordering for profiles by namefrom django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
<commit_before>from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
<commit_msg>Add ordering for profiles by name<commit_after>from django.contrib.auth.models import User
from django.views.generic import ListView, UpdateView
from .forms import ProfileForm
from .models import UserProfile
class ProfilesList(ListView):
context_object_name = "profiles"
template_name = "profiles/list.html"
queryset = User.objects.filter(profile__show=True)
class ProfilesListOrganizers(ListView):
context_object_name = "organizers"
template_name = "profiles/organizers.html"
queryset = UserProfile.user_organizers()
ordering = ["profile__display_name"]
class ProfileEdit(UpdateView):
form_class = ProfileForm
template_name = "profiles/edit.html"
success_url = "/"
def get_object(self, queryset=None):
return UserProfile.objects.get(user=self.request.user)
|
fbe4761d2d679a983d2625c4969dab53500634b7
|
fases/rodar_fase_exemplo.py
|
fases/rodar_fase_exemplo.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__=='__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)
|
# -*- coding: utf-8 -*-
from os import path
import sys
project_dir = path.dirname(__file__)
project_dir = path.join('..')
sys.path.append(project_dir)
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__ == '__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)
|
Refactor para funfar via linha de comando
|
Refactor para funfar via linha de comando
|
Python
|
mit
|
guoliveer/pythonbirds,deniscampos/pythonbirds,renzon/pythonbirds-fatec,giovaneliberato/python_birds_fp,pythonprobr/pythonbirds,jvitorlb/pythonbirds,evertongoncalves/pythonbirds,renzon/python-birds-t5,Cleitoon1/pythonbirds,gomesfelipe/pythonbirds,igorlimasan/pythonbirds
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__=='__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)Refactor para funfar via linha de comando
|
# -*- coding: utf-8 -*-
from os import path
import sys
project_dir = path.dirname(__file__)
project_dir = path.join('..')
sys.path.append(project_dir)
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__ == '__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__=='__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)<commit_msg>Refactor para funfar via linha de comando<commit_after>
|
# -*- coding: utf-8 -*-
# Example level runner: builds a sample Fase (level) and launches the
# tkinter front-end. Run directly from the command line.
from os import path
import sys
project_dir = path.dirname(__file__)
# NOTE(review): this reassignment discards the dirname above and appends
# the relative path '..' instead — presumably the intent was
# path.join(project_dir, '..'); confirm before relying on it from other
# working directories.
project_dir = path.join('..')
sys.path.append(project_dir)
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__ == '__main__':
    fase = Fase(intervalo_de_colisao=10)
    # Add red birds
    for i in range(5):
        fase.adicionar_passaro(PassaroVermelho(30, 30))
    # Add yellow birds
    for i in range(30):
        fase.adicionar_passaro(PassaroAmarelo(30, 30))
    # Obstacles: a vertical column at x=300
    for i in range(30, 480, 32):
        fase.adicionar_obstaculo(Obstaculo(300, i))
    # Pigs: a vertical column at x=600
    for i in range(30, 300, 32):
        fase.adicionar_porco(Porco(600, i))
    rodar_fase(fase)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__=='__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)Refactor para funfar via linha de comando# -*- coding: utf-8 -*-
from os import path
import sys
project_dir = path.dirname(__file__)
project_dir = path.join('..')
sys.path.append(project_dir)
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__ == '__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__=='__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)<commit_msg>Refactor para funfar via linha de comando<commit_after># -*- coding: utf-8 -*-
from os import path
import sys
project_dir = path.dirname(__file__)
project_dir = path.join('..')
sys.path.append(project_dir)
from atores import PassaroAmarelo, PassaroVermelho, Obstaculo, Porco
from fase import Fase
from placa_grafica_tkinter import rodar_fase
if __name__ == '__main__':
fase = Fase(intervalo_de_colisao=10)
# Adicionar Pássaros Vermelhos
for i in range(5):
fase.adicionar_passaro(PassaroVermelho(30, 30))
# Adicionar Pássaros Amarelos
for i in range(30):
fase.adicionar_passaro(PassaroAmarelo(30, 30))
# Obstaculos
for i in range(30, 480, 32):
fase.adicionar_obstaculo(Obstaculo(300, i))
# Porcos
for i in range(30, 300, 32):
fase.adicionar_porco(Porco(600, i))
rodar_fase(fase)
|
a0df14a38bcc4f71cf073298e078160488b143ca
|
sheldon/basic_classes.py
|
sheldon/basic_classes.py
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object or string with message text
:return:
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object
:return: True or False - result of sending
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
|
Change docs of send_message method
|
Change docs of send_message method
|
Python
|
mit
|
lises/sheldon
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object or string with message text
:return:
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
Change docs of send_message method
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object
:return: True or False - result of sending
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
|
<commit_before># -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object or string with message text
:return:
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
<commit_msg>Change docs of send_message method<commit_after>
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
    """
    Holds the information describing one messaging adapter: its public
    name, its configured variables, and (once loaded) the Python module
    used to call the adapter's methods.
    """
    def __init__(self, name, variables):
        """
        Init new Adapter object
        :param name: public name of adapter which used in
                     config/adapters directory
        :param variables: variables of adapters which set in config file.
                          Example of adapter variable - Slack API key.
        """
        self.name = name
        self.variables = variables
        # Load module of adapter later; callers must set self.module
        # before get_messages() is used, otherwise it raises on None.
        self.module = None
    def get_messages(self):
        """
        Poll the adapter module forever, yielding incoming messages.
        :return: iterator of Message objects (never terminates normally)
        """
        while True:
            messages = self.module.get_messages()
            for message in messages:
                yield message
            # Sleep between polls; assumes variables contains a 'timeout'
            # key convertible to int — TODO confirm config guarantees it.
            sleep(int(self.variables['timeout']))
    def send_message(self, message):
        """
        Send message with adapter
        :param message: Message object
        :return: True or False - result of sending
        """
        # Not implemented here; concrete adapter modules provide sending.
        pass
class Message:
    """A single chat message, either incoming from or outgoing to an adapter."""

    def __init__(self, message_text, adapter):
        # Record which adapter the message belongs to and its raw text.
        self.adapter = adapter
        self.text = message_text
|
# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object or string with message text
:return:
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
Change docs of send_message method# -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object
:return: True or False - result of sending
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
|
<commit_before># -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object or string with message text
:return:
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
<commit_msg>Change docs of send_message method<commit_after># -*- coding: utf-8 -*-
"""
Declaration of classes needed for bot working:
Adapter class, Plugin class
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from time import sleep
class Adapter:
"""
Adapter class contains information about adapter:
name, variables and module using to call adapter methods
"""
def __init__(self, name, variables):
"""
Init new Adapter object
:param name: public name of adapter which used in
config/adapters directory
:param variables: variables of adapters which set in config file.
Example of adapter variable - Slack API key.
"""
self.name = name
self.variables = variables
# Load module of adapter later
self.module = None
def get_messages(self):
"""
Get new messages from adapter
:return: iterator of Message objects
"""
while True:
messages = self.module.get_messages()
for message in messages:
yield message
# Sleep seconds, which set in adapter config
sleep(int(self.variables['timeout']))
def send_message(self, message):
"""
Send message with adapter
:param message: Message object
:return: True or False - result of sending
"""
pass
class Message:
"""
Class for every message: incoming and outcoming.
"""
def __init__(self, message_text, adapter):
self.text = message_text
self.adapter = adapter
|
07ccbc36fd5148db2efc5f676fd13d4b24aa004f
|
hackasmlexer/hacklexer.py
|
hackasmlexer/hacklexer.py
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses
(r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses
(r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
|
Add register and IO addresses
|
Add register and IO addresses
|
Python
|
mit
|
cprieto/pygments_hack_asm
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
Add register and IO addresses
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses
(r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses
(r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
|
<commit_before>import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
<commit_msg>Add register and IO addresses<commit_after>
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
    """Pygments lexer for the nand2tetris Hack assembly language."""

    name = 'Hack Assembler'
    aliases = ['hack_asm']
    filenames = ['*.asm']

    # Symbols may contain letters, digits, '$', '.', '_' and '?', but must
    # not start with a digit.
    identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'

    flags = re.IGNORECASE | re.MULTILINE

    tokens = {
        'root': [
            include('whitespace'),
            (r'\(' + identifier + r'\)', Name.Label),
            # '-' is listed first so it is a literal character; the previous
            # class '[+-=...]' contained the range '+'..'=', which silently
            # swallowed digits and punctuation as Operator tokens.
            (r'[-+=;&|!]+', Operator),
            (r'\/\/.+$', Comment),
            (r'[\r\n]+', Text),
            # Predefined symbols must be tried before the generic variable
            # rule, or '@SCREEN' etc. would always tokenize as Name.Variable.
            # The old rules also began with r'\b@', which can never match:
            # there is no word boundary between whitespace and '@'.
            (r'@(SCREEN|KBD)\b', Name.Builtin.Pseudo),  # I/O addresses
            (r'@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo),  # RAM addresses
            (r'@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo),  # parameter addresses
            (r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
            (r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
            (r'null', Keyword.Pseudo),
            (r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
            (r'@[0-9]+', Name.Constant)
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),
            # NOTE(review): Hack asm uses '//' comments; the '#' rule looks
            # inherited from a template — confirm before removing.
            (r'#.*?\n', Comment)
        ]
    }
|
import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
Add register and IO addressesimport re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses
(r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses
(r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
|
<commit_before>import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
<commit_msg>Add register and IO addresses<commit_after>import re
from pygments.lexer import RegexLexer, include
from pygments.token import *
class HackAsmLexer(RegexLexer):
name = 'Hack Assembler'
aliases = ['hack_asm']
filenames = ['*.asm']
identifier = r'[a-zA-Z$._?][a-zA-Z0-9$._?]*'
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
(r'\(' + identifier + '\)', Name.Label),
(r'[+-=;&|!]+', Operator),
(r'\/\/.+$', Comment),
(r'[\r\n]+', Text),
(r'@[A-Za-z][A-Za-z0-9]+', Name.Variable),
(r'\b(JGT|JEQ|JGE|JLT|JNE|JLE|JMP)\b', Keyword),
(r'\b@(SCREEN|KBD)\b', Name.Builtin.Pseudo), # I/O addresses
(r'\b@(R0|R1|R2|R3|R4|R5|R6|R7|R8|R9|R10|R11|R12|R13|R14|R15)\b', Name.Builtin.Pseudo), # RAM Addresses
(r'\b@(SP|LCL|ARG|THIS|THAT)\b', Name.Builtin.Pseudo), # Parameter addresses
(r'null', Keyword.Pseudo),
(r'\b(D|M|MD|A|AM|AD|AMD)\b', Name.Builtin),
(r'@[0-9]+', Name.Constant)
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
(r'#.*?\n', Comment)
]
}
|
6b07710ae8c7681f58060c15c74bf5bd3dda4f3b
|
handroll/composers/txt.py
|
handroll/composers/txt.py
|
# Copyright (c) 2014, Matt Layman
import textile
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def _generate_content(self, source):
return textile.textile(source)
|
# Copyright (c) 2014, Matt Layman
import sys
try:
import textile
except ImportError:
# FIXME: textile not supported on Python 3.
pass
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def compose(self, template, source_file, out_dir):
# Python 2.6 does not recognize the `major` attribute of version info.
if sys.version_info[0] == 3:
logger.error('Sorry. Textile does not yet support Python 3.')
return
super(TextileComposer, self).compose(template, source_file, out_dir)
def _generate_content(self, source):
return textile.textile(source)
|
Revert "Textile is working with Python 3 now."
|
Revert "Textile is working with Python 3 now."
This reverts commit 849316dd5bffa9132608cc9eeac63b08188f31c0.
Textile is still having issues with Python 3.2 so rollback the support until it
is fixed.
|
Python
|
bsd-2-clause
|
handroll/handroll
|
# Copyright (c) 2014, Matt Layman
import textile
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def _generate_content(self, source):
return textile.textile(source)
Revert "Textile is working with Python 3 now."
This reverts commit 849316dd5bffa9132608cc9eeac63b08188f31c0.
Textile is still having issues with Python 3.2 so rollback the support until it
is fixed.
|
# Copyright (c) 2014, Matt Layman
import sys
try:
import textile
except ImportError:
# FIXME: textile not supported on Python 3.
pass
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def compose(self, template, source_file, out_dir):
# Python 2.6 does not recognize the `major` attribute of version info.
if sys.version_info[0] == 3:
logger.error('Sorry. Textile does not yet support Python 3.')
return
super(TextileComposer, self).compose(template, source_file, out_dir)
def _generate_content(self, source):
return textile.textile(source)
|
<commit_before># Copyright (c) 2014, Matt Layman
import textile
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def _generate_content(self, source):
return textile.textile(source)
<commit_msg>Revert "Textile is working with Python 3 now."
This reverts commit 849316dd5bffa9132608cc9eeac63b08188f31c0.
Textile is still having issues with Python 3.2 so rollback the support until it
is fixed.<commit_after>
|
# Copyright (c) 2014, Matt Layman
import sys
try:
import textile
except ImportError:
# FIXME: textile not supported on Python 3.
pass
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
    """Compose HTML from Textile files (``.textile``).

    The first line of the source file becomes the template's ``title``
    data; every remaining line is converted to HTML and handed to the
    template as the ``content`` data.
    """

    def compose(self, template, source_file, out_dir):
        # Python 2.6 does not recognize the `major` attribute of version
        # info, so index the tuple instead.
        major = sys.version_info[0]
        if major == 3:
            logger.error('Sorry. Textile does not yet support Python 3.')
            return
        super(TextileComposer, self).compose(template, source_file, out_dir)

    def _generate_content(self, source):
        # Delegate the actual markup conversion to the textile package.
        return textile.textile(source)
|
# Copyright (c) 2014, Matt Layman
import textile
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def _generate_content(self, source):
return textile.textile(source)
Revert "Textile is working with Python 3 now."
This reverts commit 849316dd5bffa9132608cc9eeac63b08188f31c0.
Textile is still having issues with Python 3.2 so rollback the support until it
is fixed.# Copyright (c) 2014, Matt Layman
import sys
try:
import textile
except ImportError:
# FIXME: textile not supported on Python 3.
pass
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def compose(self, template, source_file, out_dir):
# Python 2.6 does not recognize the `major` attribute of version info.
if sys.version_info[0] == 3:
logger.error('Sorry. Textile does not yet support Python 3.')
return
super(TextileComposer, self).compose(template, source_file, out_dir)
def _generate_content(self, source):
return textile.textile(source)
|
<commit_before># Copyright (c) 2014, Matt Layman
import textile
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def _generate_content(self, source):
return textile.textile(source)
<commit_msg>Revert "Textile is working with Python 3 now."
This reverts commit 849316dd5bffa9132608cc9eeac63b08188f31c0.
Textile is still having issues with Python 3.2 so rollback the support until it
is fixed.<commit_after># Copyright (c) 2014, Matt Layman
import sys
try:
import textile
except ImportError:
# FIXME: textile not supported on Python 3.
pass
from handroll import logger
from handroll.composers import GenericHTMLComposer
class TextileComposer(GenericHTMLComposer):
"""Compose HTML from Textile files (``.textile``).
The first line of the file will be used as the ``title`` data for the
template. All following lines will be converted to HTML and sent to the
template as the ``content`` data.
"""
def compose(self, template, source_file, out_dir):
# Python 2.6 does not recognize the `major` attribute of version info.
if sys.version_info[0] == 3:
logger.error('Sorry. Textile does not yet support Python 3.')
return
super(TextileComposer, self).compose(template, source_file, out_dir)
def _generate_content(self, source):
return textile.textile(source)
|
ff89cb8216168beb9c79028080fbccbe91c13000
|
masters/master.chromium.infra/master_site_config.py
|
masters/master.chromium.infra/master_site_config.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id= 1
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id = 11
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
|
Use 11 as master_port_id for labs convenience.
|
Use 11 as master_port_id for labs convenience.
R=phajdan.jr@chromium.org
BUG=449961
Review URL: https://codereview.chromium.org/935813002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294106 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id= 1
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
Use 11 as master_port_id for labs convenience.
R=phajdan.jr@chromium.org
BUG=449961
Review URL: https://codereview.chromium.org/935813002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294106 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id = 11
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id= 1
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
<commit_msg>Use 11 as master_port_id for labs convenience.
R=phajdan.jr@chromium.org
BUG=449961
Review URL: https://codereview.chromium.org/935813002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294106 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
    """ActiveMaster definition for the chromium.infra buildbot master."""
    project_name = 'Infra'
    # Port offset on top of Master1's base ports; per the commit note,
    # 11 was chosen for labs convenience.
    master_port_id = 11
    buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
    # Credentials file used by the master to authenticate as a service
    # account.
    service_account_file = 'service-account-infra.json'
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id= 1
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
Use 11 as master_port_id for labs convenience.
R=phajdan.jr@chromium.org
BUG=449961
Review URL: https://codereview.chromium.org/935813002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294106 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id = 11
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id= 1
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
<commit_msg>Use 11 as master_port_id for labs convenience.
R=phajdan.jr@chromium.org
BUG=449961
Review URL: https://codereview.chromium.org/935813002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294106 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class Infra(Master.Master1):
project_name = 'Infra'
master_port_id = 11
buildbot_url = 'https://build.chromium.org/p/chromium.infra/'
service_account_file = 'service-account-infra.json'
|
20224987f7c1ac34b13587a6dc7c1241e0466663
|
dataset/dataset/spiders/dataset_spider.py
|
dataset/dataset/spiders/dataset_spider.py
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
Fix import post merge to project directory
|
Fix import post merge to project directory
|
Python
|
mit
|
MaxLikelihood/CODE
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
Fix import post merge to project directory
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
    """Crawl data.gc.ca dataset pages and extract basic metadata."""

    name = 'dataset'
    # allowed_domains must contain bare domain names. The previous value
    # ('data.gc.ca/data/en') included a URL path, which never matches a
    # request's host, so the offsite middleware dropped every followed link.
    allowed_domains = ['data.gc.ca']
    start_urls = ['http://data.gc.ca/data/en/dataset?page=1']

    # Follow only links that look like dataset UUID detail pages.
    rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
                  'parse_dataset')]

    def parse_dataset(self, response):
        """Build a DatasetItem from a single dataset detail page.

        :param response: scrapy Response for one dataset page
        :return: populated DatasetItem
        """
        sel = Selector(response)
        dataset = items.DatasetItem()
        dataset['url'] = response.url
        # Dataset title heading in the main article column.
        dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
        # Update-frequency entry in the sidebar's related-info module.
        dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
        return dataset
|
<commit_before>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
<commit_msg>Fix import post merge to project directory<commit_after>
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
Fix import post merge to project directoryfrom scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
<commit_before>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
<commit_msg>Fix import post merge to project directory<commit_after>from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from .. import items
class DatasetSpider(CrawlSpider):
name = 'dataset'
allowed_domains = ['data.gc.ca/data/en']
start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']),
'parse_dataset')]
def parse_dataset(self, response):
sel = Selector(response)
dataset = items.DatasetItem()
dataset['url'] = response.url
dataset['name'] = sel.xpath("//div[@class='span-6']/article/div[@class='module'][1]/section[@class='module-content indent-large'][1]/h1/text()").extract()
dataset['frequency'] = sel.xpath("//div[@class='span-2']/aside[@class='secondary']/div[@class='module-related'][2]/ul[1]/li[@class='margin-bottom-medium']/text()").extract()
return dataset
|
6c0287a3ba1c98d9f4879c4f2ec95a3d6406b6ae
|
meinberlin/apps/dashboard/filtersets.py
|
meinberlin/apps/dashboard/filtersets.py
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
typ = django_filters.CharFilter(
widget=views.TypeWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created', 'typ']
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created']
|
Remove typ filter from dashboard
|
Remove typ filter from dashboard
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
    """Free-text search widget with a localized label."""
    # Label rendered next to the search input.
    label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
    """Filter set for the dashboard project list.

    Supports free-text search over the project name, archive state,
    creation year and project type, plus ordering by creation date.
    """
    # Hide archived projects unless the user explicitly opts in.
    defaults = {
        'is_archived': 'false'
    }
    ordering = django_filters.OrderingFilter(
        choices=(
            ('-created', _('Most recent')),
        ),
        # Single fixed ordering, so no "no ordering" choice is offered.
        empty_label=None,
        widget=views.OrderingWidget,
    )
    search = FreeTextFilter(
        widget=FreeTextFilterWidget,
        fields=['name']
    )
    is_archived = django_filters.BooleanFilter(
        widget=views.ArchivedWidget
    )
    # Filter by the year component of the creation timestamp.
    created = django_filters.NumberFilter(
        name='created',
        lookup_expr='year',
        widget=views.YearWidget,
    )
    typ = django_filters.CharFilter(
        widget=views.TypeWidget,
    )
    class Meta:
        model = Project
        fields = ['search', 'is_archived', 'created', 'typ']
Remove typ filter from dashboard
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created']
|
<commit_before>import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
typ = django_filters.CharFilter(
widget=views.TypeWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created', 'typ']
<commit_msg>Remove typ filter from dashboard<commit_after>
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created']
|
import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
typ = django_filters.CharFilter(
widget=views.TypeWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created', 'typ']
Remove typ filter from dashboardimport django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created']
|
<commit_before>import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
typ = django_filters.CharFilter(
widget=views.TypeWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created', 'typ']
<commit_msg>Remove typ filter from dashboard<commit_after>import django_filters
from django.utils.translation import ugettext_lazy as _
from adhocracy4.filters import widgets as filters_widgets
from adhocracy4.filters.filters import DefaultsFilterSet
from adhocracy4.filters.filters import FreeTextFilter
from adhocracy4.projects.models import Project
from meinberlin.apps.projects import views
class FreeTextFilterWidget(filters_widgets.FreeTextFilterWidget):
label = _('Search')
class DashboardProjectFilterSet(DefaultsFilterSet):
defaults = {
'is_archived': 'false'
}
ordering = django_filters.OrderingFilter(
choices=(
('-created', _('Most recent')),
),
empty_label=None,
widget=views.OrderingWidget,
)
search = FreeTextFilter(
widget=FreeTextFilterWidget,
fields=['name']
)
is_archived = django_filters.BooleanFilter(
widget=views.ArchivedWidget
)
created = django_filters.NumberFilter(
name='created',
lookup_expr='year',
widget=views.YearWidget,
)
class Meta:
model = Project
fields = ['search', 'is_archived', 'created']
|
8f6a19bade1a0591f3feba4521fdf42c157c179d
|
skyline_path/algorithms/growing_graph.py
|
skyline_path/algorithms/growing_graph.py
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'
|
class GrowingGraph:
    """Node frontier that grows outward from a set of seed nodes.

    ``outer_nodes`` is the current frontier; ``inner_nodes`` holds nodes
    whose neighbors have already been expanded.
    """

    def __init__(self, neighbors_table, start_nodes):
        # Mapping: node -> iterable of neighboring nodes.
        self.neighbors_table = neighbors_table
        self.outer_nodes = set(start_nodes)
        self.inner_nodes = set()

    def all_nodes(self):
        """Return every node seen so far (frontier plus interior)."""
        return self.outer_nodes.union(self.inner_nodes)

    def growing(self, times=1):
        """Expand the frontier ``times`` steps outward."""
        for _ in range(times):
            # Snapshot the frontier; _update_nodes mutates outer_nodes.
            frontier = tuple(self.outer_nodes)
            for node in frontier:
                self._update_nodes(node)

    def _update_nodes(self, old_node):
        """Move ``old_node`` to the interior and add its unseen neighbors."""
        neighbors = set(self.neighbors_table[old_node])
        if not neighbors:
            # Dead end: the node stays on the frontier.
            return
        self.outer_nodes.remove(old_node)
        self.inner_nodes.add(old_node)
        # Only neighbors not already expanded join the frontier.
        self.outer_nodes |= neighbors - self.inner_nodes

    def __str__(self):
        return 'GrowingGraph(out:{}, in:{})'.format(
            self.outer_nodes, self.inner_nodes
        )
|
Add all_nodes and growing times param
|
Add all_nodes and growing times param
|
Python
|
mit
|
shadow3x3x3/renew-skyline-path-query
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'Add all_nodes and growing times param
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
<commit_before>class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'<commit_msg>Add all_nodes and growing times param<commit_after>
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'Add all_nodes and growing times paramclass GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
<commit_before>class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def growing(self):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return f'GrowingGraph(out:{self.outer_nodes}, in:{self.inner_nodes})'<commit_msg>Add all_nodes and growing times param<commit_after>class GrowingGraph:
def __init__(self, neighbors_table, start_nodes):
self.neighbors_table = neighbors_table
self.outer_nodes = set(start_nodes)
self.inner_nodes = set()
def all_nodes(self):
return self.outer_nodes | self.inner_nodes
def growing(self, times=1):
for _ in range(times):
for old_node in self.outer_nodes.copy():
self._update_nodes(old_node)
def _update_nodes(self, old_node):
new_nodes = set(self.neighbors_table[old_node])
if new_nodes:
self.outer_nodes.remove(old_node)
self.inner_nodes.add(old_node)
for new_node in new_nodes:
if new_node not in self.inner_nodes:
self.outer_nodes.add(new_node)
def __str__(self):
return 'GrowingGraph(out:{}, in:{})'.format(
self.outer_nodes, self.inner_nodes
)
|
776814f7c5ef5b54167adbe3cce29b8e8381fd69
|
scripts/cluster/craq/start_craq_server.py
|
scripts/cluster/craq/start_craq_server.py
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru28', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru27', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
Use servers for craq that are actually alive.
|
Use servers for craq that are actually alive.
|
Python
|
bsd-3-clause
|
sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru28', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
Use servers for craq that are actually alive.
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru27', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru28', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Use servers for craq that are actually alive.<commit_after>
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru27', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru28', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
Use servers for craq that are actually alive.#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru27', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru28', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Use servers for craq that are actually alive.<commit_after>#!/usr/bin/python
import sys
import subprocess
import time
import socket
def main():
if (not socket.gethostname() in ['meru27', 'meru29', 'meru30']):
return 0
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-32 -d meru -p 10333 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
2aab3167f70fa736fafb3507e71a6233a02363eb
|
space-age/space_age.py
|
space-age/space_age.py
|
class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
|
class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
|
Implement __getattr__ to reduce code
|
Implement __getattr__ to reduce code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
Implement __getattr__ to reduce code
|
class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
|
<commit_before>class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
<commit_msg>Implement __getattr__ to reduce code<commit_after>
|
class SpaceAge(object):
    """Convert an age in seconds into years on each planet."""

    # Orbital period of each planet, expressed in Earth years.
    YEARS = {"on_earth": 1,
             "on_mercury": 0.2408467,
             "on_venus": 0.61519726,
             "on_mars": 1.8808158,
             "on_jupiter": 11.862615,
             "on_saturn": 29.447498,
             "on_uranus": 84.016846,
             "on_neptune": 164.79132}

    def __init__(self, seconds):
        self.seconds = seconds

    @property
    def years(self):
        """Elapsed time in Earth years (31557600 seconds each)."""
        return self.seconds / 31557600

    def __getattr__(self, on_planet):
        """Synthesize the ``on_<planet>()`` accessors from the YEARS table."""
        period = SpaceAge.YEARS.get(on_planet)
        if period is None:
            raise AttributeError
        return lambda: round(self.years / period, 2)
|
class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
Implement __getattr__ to reduce codeclass SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
|
<commit_before>class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
<commit_msg>Implement __getattr__ to reduce code<commit_after>class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
|
47c50f9e3f8c0643e0e76cd60fa5694701e73afe
|
scanner/ScannerApplication.py
|
scanner/ScannerApplication.py
|
from Cura.Application import Application
class ScannerApplication(Application):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
|
from Cura.WxApplication import WxApplication
class ScannerApplication(WxApplication):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
super(ScannerApplication, self).run()
|
Use WxApplication as base class for the scanner
|
Use WxApplication as base class for the scanner
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
from Cura.Application import Application
class ScannerApplication(Application):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
Use WxApplication as base class for the scanner
|
from Cura.WxApplication import WxApplication
class ScannerApplication(WxApplication):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
super(ScannerApplication, self).run()
|
<commit_before>from Cura.Application import Application
class ScannerApplication(Application):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
<commit_msg>Use WxApplication as base class for the scanner<commit_after>
|
from Cura.WxApplication import WxApplication
class ScannerApplication(WxApplication):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
super(ScannerApplication, self).run()
|
from Cura.Application import Application
class ScannerApplication(Application):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
Use WxApplication as base class for the scannerfrom Cura.WxApplication import WxApplication
class ScannerApplication(WxApplication):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
super(ScannerApplication, self).run()
|
<commit_before>from Cura.Application import Application
class ScannerApplication(Application):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
<commit_msg>Use WxApplication as base class for the scanner<commit_after>from Cura.WxApplication import WxApplication
class ScannerApplication(WxApplication):
def __init__(self):
super(ScannerApplication, self).__init__()
self._plugin_registry.loadPlugin("STLReader")
self._plugin_registry.loadPlugin("STLWriter")
self._plugin_registry.loadPlugin("MeshView")
self._plugin_registry.loadPlugin("TransformTool")
def run(self):
print("Imma scanning ma laz0rs")
super(ScannerApplication, self).run()
|
e3804c302df761a4ebf7f2c7ed3e3c0bc8d079e7
|
grader/grader/__init__.py
|
grader/grader/__init__.py
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
"""Script entry point
.. todo::
Add a "verbose" flag
"""
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
Add a todo item to run()
|
Add a todo item to run()
|
Python
|
mit
|
redkyn/grader,grade-it/grader,redkyn/grader
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
Add a todo item to run()
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
"""Script entry point
.. todo::
Add a "verbose" flag
"""
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
<commit_before>import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
<commit_msg>Add a todo item to run()<commit_after>
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
"""Script entry point
.. todo::
Add a "verbose" flag
"""
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
Add a todo item to run()import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
"""Script entry point
.. todo::
Add a "verbose" flag
"""
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
<commit_before>import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
<commit_msg>Add a todo item to run()<commit_after>import argparse
import importlib
import logging
import os
logger = logging.getLogger(__name__)
description = "An automated grading tool for programming assignments."
subcommands = {
"init": "grader.init",
"new": "grader.new",
"image": "grader.image",
"grade": "grader.grade"
}
def run():
"""Script entry point
.. todo::
Add a "verbose" flag
"""
# Configure logging
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description=description)
parser.add_argument('--path', default=os.getcwd(),
help='Path to the root of a grader')
# If no arguments are provided, show the usage screen
parser.set_defaults(run=lambda x: parser.print_usage())
# Set up subcommands for each package
subparsers = parser.add_subparsers(title="subcommands")
for name, path in subcommands.items():
module = importlib.import_module(path)
subparser = subparsers.add_parser(name, help=module.help)
module.setup_parser(subparser)
# The 'help' command shows the help screen
help_parser = subparsers.add_parser("help", help="Show this help screen")
help_parser.set_defaults(run=lambda x: parser.print_help())
# Parse CLI args
args = parser.parse_args()
# Do it
args.run(args)
if __name__ == '__main__':
run()
|
6bef0dc50470bc71c15e0fb7c86f03e69c416e67
|
scrapi/harvesters/datacite.py
|
scrapi/harvesters/datacite.py
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
''' In the DataCite OAI PMH api, there are often 2 descriptions: A type and
a longer kind of abstract. If there are two options, pick the second one which
is almost always the longer abstract
'''
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
Add docstring explaiing why to take the second description
|
Add docstring explaiing why to take the second description
|
Python
|
apache-2.0
|
CenterForOpenScience/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,ostwald/scrapi,erinspace/scrapi,mehanig/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,alexgarciac/scrapi,felliott/scrapi,mehanig/scrapi,jeffreyliu3230/scrapi
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
Add docstring explaiing why to take the second description
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
''' In the DataCite OAI PMH api, there are often 2 descriptions: A type and
a longer kind of abstract. If there are two options, pick the second one which
is almost always the longer abstract
'''
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
<commit_before>'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
<commit_msg>Add docstring explaiing why to take the second description<commit_after>
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
''' In the DataCite OAI PMH api, there are often 2 descriptions: A type and
a longer kind of abstract. If there are two options, pick the second one which
is almost always the longer abstract
'''
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
Add docstring explaiing why to take the second description'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
''' In the DataCite OAI PMH api, there are often 2 descriptions: A type and
a longer kind of abstract. If there are two options, pick the second one which
is almost always the longer abstract
'''
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
<commit_before>'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
<commit_msg>Add docstring explaiing why to take the second description<commit_after>'''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
short_name = 'datacite'
long_name = 'DataCite MDS'
url = 'http://oai.datacite.org/oai'
base_url = 'http://oai.datacite.org/oai'
property_list = ['date', 'identifier', 'setSpec', 'description']
timezone_granularity = True
@property
def schema(self):
return updated_schema(self._schema, {
"description": ("//dc:description/node()", get_second_description),
"uris": {
"canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
"objectUris": ('//dc:identifier/node()', oai_extract_dois)
}
})
def get_second_description(descriptions):
''' In the DataCite OAI PMH api, there are often 2 descriptions: A type and
a longer kind of abstract. If there are two options, pick the second one which
is almost always the longer abstract
'''
if descriptions:
if len(descriptions) > 1:
return descriptions[1]
else:
return descriptions[0]
return ''
|
40af3e546a9024f7bb7786828d22534f8dff103a
|
neutron_fwaas/common/fwaas_constants.py
|
neutron_fwaas/common/fwaas_constants.py
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
L3_AGENT = 'l3_agent'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
Remove unused constant for topics
|
Remove unused constant for topics
While reading the code, I found "L3_AGENT" topic is defined
but never be used.
Change-Id: I9b6da61f9fe5224d2c25bbe7cc55fd508b4e240f
|
Python
|
apache-2.0
|
openstack/neutron-fwaas,openstack/neutron-fwaas
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
L3_AGENT = 'l3_agent'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
Remove unused constant for topics
While reading the code, I found "L3_AGENT" topic is defined
but never be used.
Change-Id: I9b6da61f9fe5224d2c25bbe7cc55fd508b4e240f
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
<commit_before># Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
L3_AGENT = 'l3_agent'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
<commit_msg>Remove unused constant for topics
While reading the code, I found "L3_AGENT" topic is defined
but never be used.
Change-Id: I9b6da61f9fe5224d2c25bbe7cc55fd508b4e240f<commit_after>
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
L3_AGENT = 'l3_agent'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
Remove unused constant for topics
While reading the code, I found "L3_AGENT" topic is defined
but never be used.
Change-Id: I9b6da61f9fe5224d2c25bbe7cc55fd508b4e240f# Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
<commit_before># Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
L3_AGENT = 'l3_agent'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
<commit_msg>Remove unused constant for topics
While reading the code, I found "L3_AGENT" topic is defined
but never be used.
Change-Id: I9b6da61f9fe5224d2c25bbe7cc55fd508b4e240f<commit_after># Copyright 2015 Cisco Systems, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
FIREWALL = 'FIREWALL'
FIREWALL_V2 = 'FIREWALL_V2'
# Constants for "topics"
FIREWALL_PLUGIN = 'q-firewall-plugin'
FW_AGENT = 'firewall_agent'
FIREWALL_RULE_LIST = 'firewall_rule_list'
|
5055e01bb7ea340ef96204a360002907c43fac91
|
tsserver/models.py
|
tsserver/models.py
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def __init__(self, timestamp, temperature, pressure):
self.timestamp = timestamp
self.temperature = temperature
self.pressure = pressure
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
Remove Telemetry.__init__ as not necessary
|
Remove Telemetry.__init__ as not necessary
|
Python
|
mit
|
m4tx/techswarm-server
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def __init__(self, timestamp, temperature, pressure):
self.timestamp = timestamp
self.temperature = temperature
self.pressure = pressure
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
Remove Telemetry.__init__ as not necessary
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
<commit_before>from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def __init__(self, timestamp, temperature, pressure):
self.timestamp = timestamp
self.temperature = temperature
self.pressure = pressure
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
<commit_msg>Remove Telemetry.__init__ as not necessary<commit_after>
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def __init__(self, timestamp, temperature, pressure):
self.timestamp = timestamp
self.temperature = temperature
self.pressure = pressure
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
Remove Telemetry.__init__ as not necessaryfrom tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
<commit_before>from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def __init__(self, timestamp, temperature, pressure):
self.timestamp = timestamp
self.temperature = temperature
self.pressure = pressure
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
<commit_msg>Remove Telemetry.__init__ as not necessary<commit_after>from tsserver import db
from tsserver.dtutils import datetime_to_str
class Telemetry(db.Model):
"""
All the data that is going to be obtained in regular time intervals
(every second or so).
"""
timestamp = db.Column(db.DateTime, primary_key=True)
temperature = db.Column(db.Float)
"""Temperature in Celsius."""
pressure = db.Column(db.Float)
"""Air pressure in hPa."""
def as_dict(self):
return {'timestamp': datetime_to_str(self.timestamp),
'temperature': self.temperature,
'pressure': self.pressure}
|
9f42cd231375475d27c6fe298ec862065c34f8ca
|
armstrong/core/arm_sections/views.py
|
armstrong/core/arm_sections/views.py
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
Refactor SimpleSectionView to inherit DetailView
|
Refactor SimpleSectionView to inherit DetailView
|
Python
|
apache-2.0
|
armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
Refactor SimpleSectionView to inherit DetailView
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
<commit_before>from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
<commit_msg>Refactor SimpleSectionView to inherit DetailView<commit_after>
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
Refactor SimpleSectionView to inherit DetailViewfrom django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
<commit_before>from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(TemplateView):
well_title = None
def get_section(self):
return get_object_or_404(Section, full_slug=self.kwargs['full_slug'])
def get_context_data(self, **kwargs):
context = super(SimpleSectionView, self).get_context_data(**kwargs)
context["section"] = self.get_section()
return context
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
<commit_msg>Refactor SimpleSectionView to inherit DetailView<commit_after>from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
2ca5ccb861962a021f81b6e794f5372d8079216f
|
fml/generatechangedfilelist.py
|
fml/generatechangedfilelist.py
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
|
Tweak file list script to print obf names
|
Tweak file list script to print obf names
|
Python
|
lgpl-2.1
|
Zaggy1024/MinecraftForge,luacs1998/MinecraftForge,simon816/MinecraftForge,karlthepagan/MinecraftForge,jdpadrnos/MinecraftForge,bonii-xx/MinecraftForge,mickkay/MinecraftForge,dmf444/MinecraftForge,Theerapak/MinecraftForge,ThiagoGarciaAlves/MinecraftForge,blay09/MinecraftForge,shadekiller666/MinecraftForge,Mathe172/MinecraftForge,CrafterKina/MinecraftForge,RainWarrior/MinecraftForge,fcjailybo/MinecraftForge,Vorquel/MinecraftForge,brubo1/MinecraftForge,Ghostlyr/MinecraftForge
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
Tweak file list script to print obf names
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
|
<commit_before>import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
<commit_msg>Tweak file list script to print obf names<commit_after>
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
|
import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
Tweak file list script to print obf namesimport sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
|
<commit_before>import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
md5dir = os.path.abspath(sys.argv[1])
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(md5dir,"temp","server.md5")
postlist = os.path.join(md5dir,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
with open(list_file, 'w') as fh:
fh.write(difflist)
if __name__ == '__main__':
main()
<commit_msg>Tweak file list script to print obf names<commit_after>import sys
import os
import commands
import fnmatch
import re
import subprocess, shlex
mcp_root = os.path.abspath(sys.argv[1])
sys.path.append(os.path.join(mcp_root,"runtime"))
from filehandling.srgshandler import parse_srg
def cmdsplit(args):
if os.sep == '\\':
args = args.replace('\\', '\\\\')
return shlex.split(args)
def main():
list_file = os.path.abspath(sys.argv[2])
prelist = os.path.join(mcp_root,"temp","server.md5")
postlist = os.path.join(mcp_root,"temp","server_reobf.md5")
cmd = 'diff --unchanged-group-format='' --old-group-format='' --new-group-format=\'%%>\' --changed-group-format=\'%%>\' %s %s' % (prelist, postlist)
process = subprocess.Popen(cmdsplit(cmd), stdout=subprocess.PIPE, bufsize=-1)
difflist,_= process.communicate()
srg_data = parse_srg(os.path.join(mcp_root,"temp","server_rg.srg")
classes=dict()
for row in srg_data['CL']:
classes[row['deobf_name']] = row['obf_name']
with open(list_file, 'w') as fh:
for diff in difflist:
(clazz,md5)=diff.strip().split()
if clazz in classes:
clazz=classes[clazz]
fh.write("%s\n" %(clazz))
if __name__ == '__main__':
main()
|
18332cdac7c7dcb2ef64e3a9ad17b8b229387af8
|
spraakbanken/s5/spr_local/reconstruct_corpus.py
|
spraakbanken/s5/spr_local/reconstruct_corpus.py
|
#!/usr/bin/env python3
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
#!/usr/bin/env python3
from __future__ import print_function
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
Add reconstruct corpus as a test
|
Add reconstruct corpus as a test
|
Python
|
apache-2.0
|
psmit/kaldi-recipes,phsmit/kaldi-recipes,psmit/kaldi-recipes,psmit/kaldi-recipes,phsmit/kaldi-recipes
|
#!/usr/bin/env python3
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
Add reconstruct corpus as a test
|
#!/usr/bin/env python3
from __future__ import print_function
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
<commit_before>#!/usr/bin/env python3
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
<commit_msg>Add reconstruct corpus as a test<commit_after>
|
#!/usr/bin/env python3
from __future__ import print_function
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
#!/usr/bin/env python3
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
Add reconstruct corpus as a test#!/usr/bin/env python3
from __future__ import print_function
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
<commit_before>#!/usr/bin/env python3
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
<commit_msg>Add reconstruct corpus as a test<commit_after>#!/usr/bin/env python3
from __future__ import print_function
import argparse
import collections
import random
import sys
def reconstruct(f_in, f_out):
sentence_starts = []
contexts = {}
for line in f_in:
parts = line.split()
words = parts[:-1]
count = int(parts[-1])
if words[0] == "<s>" and words[-1] == "</s>":
for _ in range(count):
print(" ".join(words), file=f_out)
continue
context = tuple(words[:-1])
if context not in contexts:
contexts[context] = collections.Counter()
contexts[context][words[-1]] += count
random.shuffle(sentence_starts)
c = len(sentence_starts[0]) - 1
for start in sentence_starts:
line = list(start)
while line[-1] != "</s>":
context = line[:-c]
next_word = contexts[context].most_common(1)[0][0]
contexts[context][next_word] -= 1
line.append(next_word)
print(" ".join(line), file=f_out)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Construct corpus')
parser.add_argument('infile', nargs='?', type=argparse.FileType('r', encoding='utf-8'), default=sys.stdin)
parser.add_argument('outfile', nargs='?', type=argparse.FileType('w', encoding='utf-8'), default=sys.stdout)
args = parser.parse_args()
reconstruct(args.infile, args.outfile)
|
1301f5c5d5b9087ec32e3cf78f93ab9a4e708426
|
geokey_dataimports/__init__.py
|
geokey_dataimports/__init__.py
|
"""Main initialisation for extension."""
VERSION = (0, 4, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
"""Main initialisation for extension."""
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
Increment minor version number ahead of release
|
Increment minor version number ahead of release
|
Python
|
mit
|
ExCiteS/geokey-dataimports,ExCiteS/geokey-dataimports,ExCiteS/geokey-dataimports
|
"""Main initialisation for extension."""
VERSION = (0, 4, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
Increment minor version number ahead of release
|
"""Main initialisation for extension."""
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
<commit_before>"""Main initialisation for extension."""
VERSION = (0, 4, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
<commit_msg>Increment minor version number ahead of release<commit_after>
|
"""Main initialisation for extension."""
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
"""Main initialisation for extension."""
VERSION = (0, 4, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
Increment minor version number ahead of release"""Main initialisation for extension."""
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
<commit_before>"""Main initialisation for extension."""
VERSION = (0, 4, 1)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
<commit_msg>Increment minor version number ahead of release<commit_after>"""Main initialisation for extension."""
VERSION = (0, 5, 0)
__version__ = '.'.join(map(str, VERSION))
try:
from geokey.extensions.base import register
register(
'geokey_dataimports',
'Data Imports',
display_admin=True,
superuser=False,
version=__version__
)
except BaseException:
print('Please install GeoKey first')
|
29c5391078aaa9e3c18356a18ca1c5d6f3bf82e9
|
src/temp_functions.py
|
src/temp_functions.py
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
def f_to_c(temp):
temp_k = f_to_k(temp)
result = k_to_c(temp_k)
return result
|
Write a function covert far to cesis
|
Write a function covert far to cesis
|
Python
|
mit
|
xykang/2015-05-12-BUSM-git,xykang/2015-05-12-BUSM-git
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
Write a function covert far to cesis
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
def f_to_c(temp):
temp_k = f_to_k(temp)
result = k_to_c(temp_k)
return result
|
<commit_before>def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
<commit_msg>Write a function covert far to cesis<commit_after>
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
def f_to_c(temp):
temp_k = f_to_k(temp)
result = k_to_c(temp_k)
return result
|
def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
Write a function covert far to cesisdef k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
def f_to_c(temp):
temp_k = f_to_k(temp)
result = k_to_c(temp_k)
return result
|
<commit_before>def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
<commit_msg>Write a function covert far to cesis<commit_after>def k_to_c(temp):
return temp - 273.15
def f_to_k(temp):
return ((temp - 32) * (5 / 9)) + 273.15
def f_to_c(temp):
temp_k = f_to_k(temp)
result = k_to_c(temp_k)
return result
|
5b6f4f51eb761b87881d99148e7dae013af09eb6
|
zgres/utils.py
|
zgres/utils.py
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
sleep_time = 10
try:
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
# TODO: can we do some kind of backoff?
finally:
time.sleep(sleep_time)
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
Remove sleep time as it appeared to hang forever
|
Remove sleep time as it appeared to hang forever
|
Python
|
mit
|
jinty/zgres,jinty/zgres
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
sleep_time = 10
try:
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
# TODO: can we do some kind of backoff?
finally:
time.sleep(sleep_time)
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
Remove sleep time as it appeared to hang forever
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
<commit_before>import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
sleep_time = 10
try:
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
# TODO: can we do some kind of backoff?
finally:
time.sleep(sleep_time)
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
<commit_msg>Remove sleep time as it appeared to hang forever<commit_after>
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
sleep_time = 10
try:
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
# TODO: can we do some kind of backoff?
finally:
time.sleep(sleep_time)
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
Remove sleep time as it appeared to hang foreverimport sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
<commit_before>import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
sleep_time = 10
try:
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
# TODO: can we do some kind of backoff?
finally:
time.sleep(sleep_time)
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
<commit_msg>Remove sleep time as it appeared to hang forever<commit_after>import sys
import time
import asyncio
import logging
def pg_lsn_to_int(pos):
# http://www.postgresql.org/docs/9.4/static/datatype-pg-lsn.html
# see http://eulerto.blogspot.com.es/2011/11/understanding-wal-nomenclature.html
logfile, offset = pos.split('/')
return 0xFF000000 * int(logfile, 16) + int(offset, 16)
def exception_handler(loop, context):
loop.default_exception_handler(context)
logging.error('Unexpected exception, exiting...')
sys.exit(1)
def run_asyncio(*callback_and_args):
loop = asyncio.get_event_loop()
loop.set_exception_handler(exception_handler)
if callback_and_args:
loop.call_soon(*callback_and_args)
loop.run_forever()
logging.info('Exiting after being asked to stop nicely')
return 0
|
06c3f0c5d4764b745d87bb814cbd87213bb7f747
|
infrastructure/aws/attach-index-volume.py
|
infrastructure/aws/attach-index-volume.py
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=100,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=200,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
Use a bigger volume so it doesn't fill up
|
Use a bigger volume so it doesn't fill up
|
Python
|
mpl-2.0
|
bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox,bill-mccloskey/searchfox
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=100,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
Use a bigger volume so it doesn't fill up
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=200,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
<commit_before># Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=100,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
<commit_msg>Use a bigger volume so it doesn't fill up<commit_after>
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=200,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=100,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
Use a bigger volume so it doesn't fill up# Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=200,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
<commit_before># Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=100,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
<commit_msg>Use a bigger volume so it doesn't fill up<commit_after># Creates an EBS volume for the index and attaches it to a given instance as /dev/xvdf.
# Prints the volume ID on stdout.
# Usage: attach-index-volume.py <channel> <instance-id>
import sys
import boto3
import awslib
from datetime import datetime
channel = sys.argv[1]
instanceId = sys.argv[2]
ec2 = boto3.resource('ec2')
client = boto3.client('ec2')
# Find availability zone
instances = ec2.instances.filter(InstanceIds=[instanceId])
instance = list(instances)[0]
r = client.create_volume(
Size=200,
VolumeType='gp2',
AvailabilityZone=instance.placement['AvailabilityZone'],
)
volumeId = r['VolumeId']
awslib.await_volume(client, volumeId, 'creating', 'available')
client.create_tags(Resources=[volumeId], Tags=[{
'Key': 'index',
'Value': str(datetime.now()),
}, {
'Key': 'channel',
'Value': channel,
}])
instance.attach_volume(VolumeId=volumeId, Device='xvdf')
awslib.await_volume(client, volumeId, 'available', 'in-use')
print volumeId
|
f284a1487551850c23d251a1d501e88025261369
|
readthedocs/rtd_tests/tests/test_hacks.py
|
readthedocs/rtd_tests/tests/test_hacks.py
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
    """Exercise core.hacks' patching of the import machinery (sys.meta_path)."""

    fixtures = ['eric.json', 'test_data.json']

    def setUp(self):
        # Install the import hook before each test ...
        hacks.patch_meta_path()

    def tearDown(self):
        # ... and remove it afterwards so other tests see normal imports.
        hacks.unpatch_meta_path()

    def test_hack_failed_import(self):
        # 'boogy' is presumably not a real module; with the hook in place the
        # import is expected to succeed and yield a placeholder object.
        import boogy
        # NOTE(review): assertTrue(x, msg) only checks the truthiness of x;
        # the second argument is a failure message, not an expected value.
        self.assertTrue(str(boogy), "<Silly Human, I'm not real>")

    def test_hack_correct_import(self):
        # A genuine stdlib module must not be replaced by the placeholder.
        import itertools
        # NOTE(review): assertFalse(x, msg) likewise treats the second argument
        # as a message; str(itertools) is a non-empty (truthy) string, so this
        # assertion always fails as written -- likely assertNotEqual was meant.
        self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
#def test_hack_correct_import(self):
#import itertools
#self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
Comment out known failing test for now (code not in prod).
|
Comment out known failing test for now (code not in prod).
|
Python
|
mit
|
atsuyim/readthedocs.org,Tazer/readthedocs.org,espdev/readthedocs.org,VishvajitP/readthedocs.org,kdkeyser/readthedocs.org,nyergler/pythonslides,kenshinthebattosai/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,mrshoki/readthedocs.org,asampat3090/readthedocs.org,VishvajitP/readthedocs.org,ojii/readthedocs.org,rtfd/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,fujita-shintaro/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,titiushko/readthedocs.org,istresearch/readthedocs.org,techtonik/readthedocs.org,d0ugal/readthedocs.org,hach-que/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,KamranMackey/readthedocs.org,kenwang76/readthedocs.org,cgourlay/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,clarkperkins/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,singingwolfboy/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,gjtorikian/readthedocs.org,safwanrahman/readthedocs.org,soulshake/readthedocs.org,royalwang/readthedocs.org,CedarLogic/readthedocs.org,LukasBoersma/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,techtonik/readthedocs.org,kenwang76/readthedocs.org,sid-kap/readthedocs.org,sunnyzwh/readthedocs.org,kenwang76/readthedocs.org,johncosta/private-readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,michaelmcandrew/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,Carreau/readthedocs.org,laplaceliu/readthedocs.org,clarkperkins/readthedocs.org,mrshoki/readthedocs.org,kdkeyser/readthedocs.org,pombredanne/readthedocs.org,alex/readthedocs.org,jerel/re
adthedocs.org,singingwolfboy/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,fujita-shintaro/readthedocs.org,asampat3090/readthedocs.org,Carreau/readthedocs.org,techtonik/readthedocs.org,sid-kap/readthedocs.org,cgourlay/readthedocs.org,takluyver/readthedocs.org,GovReady/readthedocs.org,emawind84/readthedocs.org,d0ugal/readthedocs.org,espdev/readthedocs.org,SteveViss/readthedocs.org,dirn/readthedocs.org,mhils/readthedocs.org,attakei/readthedocs-oauth,asampat3090/readthedocs.org,sid-kap/readthedocs.org,raven47git/readthedocs.org,wanghaven/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,agjohnson/readthedocs.org,sils1297/readthedocs.org,laplaceliu/readthedocs.org,d0ugal/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,fujita-shintaro/readthedocs.org,pombredanne/readthedocs.org,d0ugal/readthedocs.org,atsuyim/readthedocs.org,dirn/readthedocs.org,kenwang76/readthedocs.org,soulshake/readthedocs.org,jerel/readthedocs.org,KamranMackey/readthedocs.org,ojii/readthedocs.org,Carreau/readthedocs.org,CedarLogic/readthedocs.org,raven47git/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,Tazer/readthedocs.org,stevepiercy/readthedocs.org,takluyver/readthedocs.org,titiushko/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,johncosta/private-readthedocs.org,royalwang/readthedocs.org,nyergler/pythonslides,cgourlay/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,atsuyim/readthedocs.org,GovReady/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,alex/readthedocs.org,soulshake/readthedocs.org,sunnyzwh/readthedocs.org,nikolas/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,hach-que/readthedocs.org,agjohnson/readthedocs.org,VishvajitP/readthedocs.org,hach-que/readthedocs.org,kenshinthebattosai/readthedocs.org,tddv/readthedocs.org,asampat3090/readthedocs.org,pombredanne/readthedocs.org,mrshoki/readthedocs.org,sils1297/readthedocs.org,cgourla
y/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,singingwolfboy/readthedocs.org,safwanrahman/readthedocs.org,techtonik/readthedocs.org,johncosta/private-readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,davidfischer/readthedocs.org,ojii/readthedocs.org,kdkeyser/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,jerel/readthedocs.org,wijerasa/readthedocs.org,SteveViss/readthedocs.org,emawind84/readthedocs.org,mhils/readthedocs.org,atsuyim/readthedocs.org,laplaceliu/readthedocs.org,CedarLogic/readthedocs.org,wanghaven/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,mhils/readthedocs.org,tddv/readthedocs.org,alex/readthedocs.org,stevepiercy/readthedocs.org,gjtorikian/readthedocs.org,LukasBoersma/readthedocs.org,singingwolfboy/readthedocs.org,nyergler/pythonslides,ojii/readthedocs.org,laplaceliu/readthedocs.org,nikolas/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,agjohnson/readthedocs.org,SteveViss/readthedocs.org,emawind84/readthedocs.org,istresearch/readthedocs.org,istresearch/readthedocs.org,rtfd/readthedocs.org,Tazer/readthedocs.org,nyergler/pythonslides,sunnyzwh/readthedocs.org,sils1297/readthedocs.org,alex/readthedocs.org,stevepiercy/readthedocs.org
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
def test_hack_correct_import(self):
import itertools
self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
Comment out known failing test for now (code not in prod).
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
#def test_hack_correct_import(self):
#import itertools
#self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
<commit_before>from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
def test_hack_correct_import(self):
import itertools
self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
<commit_msg>Comment out known failing test for now (code not in prod).<commit_after>
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
#def test_hack_correct_import(self):
#import itertools
#self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
def test_hack_correct_import(self):
import itertools
self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
Comment out known failing test for now (code not in prod).from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
#def test_hack_correct_import(self):
#import itertools
#self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
<commit_before>from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
def test_hack_correct_import(self):
import itertools
self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
<commit_msg>Comment out known failing test for now (code not in prod).<commit_after>from django.test import TestCase
from core import hacks
class TestHacks(TestCase):
fixtures = ['eric.json', 'test_data.json']
def setUp(self):
hacks.patch_meta_path()
def tearDown(self):
hacks.unpatch_meta_path()
def test_hack_failed_import(self):
import boogy
self.assertTrue(str(boogy), "<Silly Human, I'm not real>")
#def test_hack_correct_import(self):
#import itertools
#self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
|
475d39b9af464649dbab27349180b286f0042fa8
|
dthm4kaiako/utils/get_upload_filepath.py
|
dthm4kaiako/utils/get_upload_filepath.py
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
    """Create upload path for file by date.

    Args:
        category (str): Name for directory to upload to.

    Returns:
        String of path for upload.
    """
    localised_now = datetime.now(timezone(TIME_ZONE))
    date_directory = localised_now.strftime('%Y/%m/%d')
    return join(category, date_directory)
def get_resource_upload_path(component, filename):
    """Create upload path for resource by primary key.

    Required by model FileField/ImageField.

    Args:
        component (ResourceComponent): Component object file is being added to.
        filename (str): Filename of file.

    Returns:
        String of path and filename for upload.
    """
    # Fixed: the directory name contained a stray backtick ('resources`'),
    # which put every upload under a misspelled top-level directory.
    return join('resources', str(component.resource.pk), filename)
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources', str(component.resource.pk), filename)
|
Fix typo in resource media path
|
Fix typo in resource media path
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources`', str(component.resource.pk), filename)
Fix typo in resource media path
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources', str(component.resource.pk), filename)
|
<commit_before>"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources`', str(component.resource.pk), filename)
<commit_msg>Fix typo in resource media path<commit_after>
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources', str(component.resource.pk), filename)
|
"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources`', str(component.resource.pk), filename)
Fix typo in resource media path"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources', str(component.resource.pk), filename)
|
<commit_before>"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources`', str(component.resource.pk), filename)
<commit_msg>Fix typo in resource media path<commit_after>"""Helper functions for determining file paths for uploads."""
from os.path import join
from datetime import datetime
from pytz import timezone
# This is duplicated here to avoid circular dependency with settings file
TIME_ZONE = 'NZ'
def get_upload_path_for_date(category):
"""Create upload path for file by date.
Args:
category (str): Name for directory to upload to.
Returns:
String of path for upload.
"""
return join(category, datetime.now(timezone(TIME_ZONE)).strftime('%Y/%m/%d'))
def get_resource_upload_path(component, filename):
"""Create upload path for resource by primary key.
Required by model FileField/ImageField.
Args:
component (ResourceComponent): Component object file is being added to.
filename (str): Filename of file.
Returns:
String of path and filename for upload.
"""
return join('resources', str(component.resource.pk), filename)
|
02420b89671cf7a90c357efe24997d3142353a18
|
bokeh/command/tests/test_bootstrap.py
|
bokeh/command/tests/test_bootstrap.py
|
import pytest
from bokeh.command.bootstrap import main
def test_no_subcommand(capsys):
    """Running bare ``bokeh`` must exit and print a usage error on stderr."""
    with pytest.raises(SystemExit):
        main(["bokeh"])
    captured_out, captured_err = capsys.readouterr()
    assert captured_err == "ERROR: Must specify subcommand, one of: html, json or serve\n"
    assert captured_out == ""
|
Test running `bokeh` with no subcommand
|
Test running `bokeh` with no subcommand
|
Python
|
bsd-3-clause
|
dennisobrien/bokeh,clairetang6/bokeh,azjps/bokeh,KasperPRasmussen/bokeh,msarahan/bokeh,draperjames/bokeh,stonebig/bokeh,phobson/bokeh,dennisobrien/bokeh,ptitjano/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,percyfal/bokeh,aiguofer/bokeh,aavanian/bokeh,ericmjl/bokeh,rs2/bokeh,DuCorey/bokeh,bokeh/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,philippjfr/bokeh,clairetang6/bokeh,bokeh/bokeh,bokeh/bokeh,dennisobrien/bokeh,azjps/bokeh,schoolie/bokeh,mindriot101/bokeh,quasiben/bokeh,percyfal/bokeh,phobson/bokeh,philippjfr/bokeh,mindriot101/bokeh,ericmjl/bokeh,ericmjl/bokeh,jakirkham/bokeh,jakirkham/bokeh,ptitjano/bokeh,bokeh/bokeh,schoolie/bokeh,rs2/bokeh,aavanian/bokeh,jakirkham/bokeh,quasiben/bokeh,schoolie/bokeh,dennisobrien/bokeh,aavanian/bokeh,ptitjano/bokeh,justacec/bokeh,stonebig/bokeh,aiguofer/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,DuCorey/bokeh,justacec/bokeh,aiguofer/bokeh,rs2/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,stonebig/bokeh,aiguofer/bokeh,draperjames/bokeh,quasiben/bokeh,Karel-van-de-Plassche/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,mindriot101/bokeh,phobson/bokeh,schoolie/bokeh,jakirkham/bokeh,jakirkham/bokeh,clairetang6/bokeh,ericmjl/bokeh,azjps/bokeh,msarahan/bokeh,ericmjl/bokeh,msarahan/bokeh,philippjfr/bokeh,timsnyder/bokeh,percyfal/bokeh,bokeh/bokeh,msarahan/bokeh,justacec/bokeh,stonebig/bokeh,timsnyder/bokeh,timsnyder/bokeh,aavanian/bokeh,DuCorey/bokeh,rs2/bokeh,clairetang6/bokeh,phobson/bokeh,draperjames/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,ptitjano/bokeh,draperjames/bokeh,dennisobrien/bokeh,azjps/bokeh,timsnyder/bokeh,rs2/bokeh,mindriot101/bokeh,timsnyder/bokeh,aavanian/bokeh,percyfal/bokeh,DuCorey/bokeh
|
Test running `bokeh` with no subcommand
|
import pytest
from bokeh.command.bootstrap import main
def test_no_subcommand(capsys):
with pytest.raises(SystemExit):
main(["bokeh"])
out, err = capsys.readouterr()
assert err == "ERROR: Must specify subcommand, one of: html, json or serve\n"
assert out == ""
|
<commit_before><commit_msg>Test running `bokeh` with no subcommand<commit_after>
|
import pytest
from bokeh.command.bootstrap import main
def test_no_subcommand(capsys):
with pytest.raises(SystemExit):
main(["bokeh"])
out, err = capsys.readouterr()
assert err == "ERROR: Must specify subcommand, one of: html, json or serve\n"
assert out == ""
|
Test running `bokeh` with no subcommandimport pytest
from bokeh.command.bootstrap import main
def test_no_subcommand(capsys):
with pytest.raises(SystemExit):
main(["bokeh"])
out, err = capsys.readouterr()
assert err == "ERROR: Must specify subcommand, one of: html, json or serve\n"
assert out == ""
|
<commit_before><commit_msg>Test running `bokeh` with no subcommand<commit_after>import pytest
from bokeh.command.bootstrap import main
def test_no_subcommand(capsys):
with pytest.raises(SystemExit):
main(["bokeh"])
out, err = capsys.readouterr()
assert err == "ERROR: Must specify subcommand, one of: html, json or serve\n"
assert out == ""
|
|
34fe6e4f499385cc437a720db0f54db0f0ba07d2
|
tests/tests_twobody/test_mean_elements.py
|
tests/tests_twobody/test_mean_elements.py
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body is invalid." in excinfo.exconly()
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
Add test for error check
|
Add test for error check
|
Python
|
mit
|
poliastro/poliastro
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body is invalid." in excinfo.exconly()
Add test for error check
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
<commit_before>import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body is invalid." in excinfo.exconly()
<commit_msg>Add test for error check<commit_after>
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body is invalid." in excinfo.exconly()
Add test for error checkimport pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
<commit_before>import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body is invalid." in excinfo.exconly()
<commit_msg>Add test for error check<commit_after>import pytest
from poliastro.twobody.mean_elements import get_mean_elements
def test_get_mean_elements_raises_error_if_invalid_body():
body = "Sun"
with pytest.raises(ValueError) as excinfo:
get_mean_elements(body)
assert f"The input body '{body}' is invalid." in excinfo.exconly()
|
dddf89e519e40ce118509dcb5823ad932fea88f8
|
chainer/training/triggers/__init__.py
|
chainer/training/triggers/__init__.py
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
Fix the order of importing
|
Fix the order of importing
|
Python
|
mit
|
niboshi/chainer,wkentaro/chainer,ktnyt/chainer,keisuke-umezawa/chainer,jnishi/chainer,hvy/chainer,chainer/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer,okuta/chainer,anaruse/chainer,keisuke-umezawa/chainer,tkerola/chainer,keisuke-umezawa/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,niboshi/chainer,ktnyt/chainer,jnishi/chainer,chainer/chainer,aonotas/chainer,jnishi/chainer,okuta/chainer,pfnet/chainer,hvy/chainer,ronekko/chainer,jnishi/chainer,rezoo/chainer,chainer/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,hvy/chainer
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
Fix the order of importing
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
<commit_before>from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
<commit_msg>Fix the order of importing<commit_after>
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
Fix the order of importingfrom chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
<commit_before>from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
<commit_msg>Fix the order of importing<commit_after>from chainer.training.triggers import interval_trigger # NOQA
from chainer.training.triggers import minmax_value_trigger # NOQA
# import class and function
from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA
from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA
from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
|
6469a486847823d36a8e804755c6165d0f2fd670
|
bpython/__init__.py
|
bpython/__init__.py
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
Set version to mercurial in trunk
|
Set version to mercurial in trunk
|
Python
|
mit
|
5monkeys/bpython
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Set version to mercurial in trunk
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
<commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Set version to mercurial in trunk<commit_after>
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Set version to mercurial in trunk# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
<commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Set version to mercurial in trunk<commit_after># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
d128b13e9c05516dcba587c684ef2f54884d6bb6
|
api/migrations/0001_create_application.py
|
api/migrations/0001_create_application.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
Add an additional default redirect_uri
|
Add an additional default redirect_uri
(runserver's default port)
|
Python
|
bsd-3-clause
|
hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2,hotosm/osm-export-tool2
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
Add an additional default redirect_uri
(runserver's default port)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
<commit_msg>Add an additional default redirect_uri
(runserver's default port)<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
Add an additional default redirect_uri
(runserver's default port)# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
<commit_msg>Add an additional default redirect_uri
(runserver's default port)<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-26 20:29
from __future__ import unicode_literals
from django.db import migrations
from oauth2_provider.models import Application
class Migration(migrations.Migration):
def add_default_application(apps, schema_editor):
Application.objects.create(
name="OSM Export Tool UI",
redirect_uris=
"http://localhost/authorized http://localhost:8080/authorized http://localhost:8000/authorized",
client_type=Application.CLIENT_PUBLIC,
authorization_grant_type=Application.GRANT_IMPLICIT,
skip_authorization=True)
dependencies = [
("oauth2_provider", "0005_auto_20170514_1141"),
]
operations = [
migrations.RunPython(add_default_application),
]
|
e885f0557037d2df03453961acd1c40b7c44c069
|
timesheet_activity_report/__openerp__.py
|
timesheet_activity_report/__openerp__.py
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'support': 'support@elico-corp.com',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
Add support key for Travis LINT check
|
Add support key for Travis LINT check
|
Python
|
agpl-3.0
|
Elico-Corp/odoo-addons,Elico-Corp/odoo-addons,Elico-Corp/odoo-addons
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
Add support key for Travis LINT check
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'support': 'support@elico-corp.com',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
<commit_msg>Add support key for Travis LINT check<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'support': 'support@elico-corp.com',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
Add support key for Travis LINT check# -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'support': 'support@elico-corp.com',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
<commit_msg>Add support key for Travis LINT check<commit_after># -*- coding: utf-8 -*-
# © 2015 Elico corp (www.elico-corp.com)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Timesheet Activities Report',
'version': '8.0.1.1.0',
'category': 'Human Resources',
'depends': [
'project_timesheet',
'project_issue_sheet',
'business_requirement',
'business_requirement_deliverable_project',
'project_task_category'
],
'author': 'Elico Corp',
'support': 'support@elico-corp.com',
'license': 'AGPL-3',
'website': 'https://www.elico-corp.com',
'data': [
'report/timesheet_activity_report_view.xml',
'security/ir.model.access.csv',
],
'installable': True,
'application': False
}
|
60d6f3ea5495503584220c60353df833304aff53
|
linkedin_scraper/parsers/base.py
|
linkedin_scraper/parsers/base.py
|
from os import path
import linkedin_scraper
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
def parse(self, item):
raise NotImplemented()
|
import logging
from os import path
import linkedin_scraper
logger = logging.getLogger(__name__)
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
try:
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
except FileNotFoundError:
logger.error('%s not found', name)
return set()
def parse(self, item):
raise NotImplemented()
|
Handle non existing data files in BaseParser.
|
Handle non existing data files in BaseParser.
|
Python
|
mit
|
nihn/linkedin-scraper,nihn/linkedin-scraper
|
from os import path
import linkedin_scraper
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
def parse(self, item):
raise NotImplemented()
Handle non existing data files in BaseParser.
|
import logging
from os import path
import linkedin_scraper
logger = logging.getLogger(__name__)
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
try:
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
except FileNotFoundError:
logger.error('%s not found', name)
return set()
def parse(self, item):
raise NotImplemented()
|
<commit_before>from os import path
import linkedin_scraper
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
def parse(self, item):
raise NotImplemented()
<commit_msg>Handle non existing data files in BaseParser.<commit_after>
|
import logging
from os import path
import linkedin_scraper
logger = logging.getLogger(__name__)
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
try:
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
except FileNotFoundError:
logger.error('%s not found', name)
return set()
def parse(self, item):
raise NotImplemented()
|
from os import path
import linkedin_scraper
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
def parse(self, item):
raise NotImplemented()
Handle non existing data files in BaseParser.import logging
from os import path
import linkedin_scraper
logger = logging.getLogger(__name__)
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
try:
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
except FileNotFoundError:
logger.error('%s not found', name)
return set()
def parse(self, item):
raise NotImplemented()
|
<commit_before>from os import path
import linkedin_scraper
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
def parse(self, item):
raise NotImplemented()
<commit_msg>Handle non existing data files in BaseParser.<commit_after>import logging
from os import path
import linkedin_scraper
logger = logging.getLogger(__name__)
class BaseParser:
@staticmethod
def get_data_dir():
return path.abspath(path.join(linkedin_scraper.__file__,
'../..', 'data'))
@staticmethod
def normalize_lines(lines):
return set(line.lower().strip() for line in lines)
def get_lines_from_datafile(self, name: str) -> set:
"""
Get and normalize lines from datafile.
:param name: name of the file in package data directory
"""
try:
with open(path.join(self.get_data_dir(), name)) as f:
return self.normalize_lines(f)
except FileNotFoundError:
logger.error('%s not found', name)
return set()
def parse(self, item):
raise NotImplemented()
|
5dacf6d2e2e74b783e39641674fc0f8e718618b3
|
imager/ImagerProfile/models.py
|
imager/ImagerProfile/models.py
|
from django.db import models
# from django.conf import settings
# Create your models here.
class ImagerProfile(models.Model):
profile_picture = models.ImageField()
# user = models.OneToOneField(settings.AUTH_USER_MODEL)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField()
phone_privacy = models.BooleanField()
birthday_privacy = models.BooleanField()
name_privacy = models.BooleanField()
email_privacy = models.BooleanField()
|
from django.db import models
from django.contrib.auth.models import User
# class ImagerProfile(models.Manager):
# pass
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
# objects = ImagerProfile()
profile_picture = models.ImageField(null=True)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField(default=False)
phone_privacy = models.BooleanField(default=False)
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
Change ImagerProfile model privacy booleans to default of False, profile_picture to nullable
|
Change ImagerProfile model privacy booleans to default of False, profile_picture to nullable
|
Python
|
mit
|
nbeck90/django-imager,nbeck90/django-imager
|
from django.db import models
# from django.conf import settings
# Create your models here.
class ImagerProfile(models.Model):
profile_picture = models.ImageField()
# user = models.OneToOneField(settings.AUTH_USER_MODEL)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField()
phone_privacy = models.BooleanField()
birthday_privacy = models.BooleanField()
name_privacy = models.BooleanField()
email_privacy = models.BooleanField()
Change ImagerProfile model privacy booleans to default of False, profile_picture to nullable
|
from django.db import models
from django.contrib.auth.models import User
# class ImagerProfile(models.Manager):
# pass
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
# objects = ImagerProfile()
profile_picture = models.ImageField(null=True)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField(default=False)
phone_privacy = models.BooleanField(default=False)
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
<commit_before>from django.db import models
# from django.conf import settings
# Create your models here.
class ImagerProfile(models.Model):
profile_picture = models.ImageField()
# user = models.OneToOneField(settings.AUTH_USER_MODEL)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField()
phone_privacy = models.BooleanField()
birthday_privacy = models.BooleanField()
name_privacy = models.BooleanField()
email_privacy = models.BooleanField()
<commit_msg>Change ImagerProfile model privacy booleans to default of False, profile_picture to nullable<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
# class ImagerProfile(models.Manager):
# pass
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
# objects = ImagerProfile()
profile_picture = models.ImageField(null=True)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField(default=False)
phone_privacy = models.BooleanField(default=False)
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
from django.db import models
# from django.conf import settings
# Create your models here.
class ImagerProfile(models.Model):
profile_picture = models.ImageField()
# user = models.OneToOneField(settings.AUTH_USER_MODEL)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField()
phone_privacy = models.BooleanField()
birthday_privacy = models.BooleanField()
name_privacy = models.BooleanField()
email_privacy = models.BooleanField()
Change ImagerProfile model privacy booleans to default of False, profile_picture to nullablefrom django.db import models
from django.contrib.auth.models import User
# class ImagerProfile(models.Manager):
# pass
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
# objects = ImagerProfile()
profile_picture = models.ImageField(null=True)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField(default=False)
phone_privacy = models.BooleanField(default=False)
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
<commit_before>from django.db import models
# from django.conf import settings
# Create your models here.
class ImagerProfile(models.Model):
profile_picture = models.ImageField()
# user = models.OneToOneField(settings.AUTH_USER_MODEL)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField()
phone_privacy = models.BooleanField()
birthday_privacy = models.BooleanField()
name_privacy = models.BooleanField()
email_privacy = models.BooleanField()
<commit_msg>Change ImagerProfile model privacy booleans to default of False, profile_picture to nullable<commit_after>from django.db import models
from django.contrib.auth.models import User
# class ImagerProfile(models.Manager):
# pass
class ImagerProfile(models.Model):
user = models.OneToOneField(User)
# objects = ImagerProfile()
profile_picture = models.ImageField(null=True)
phone_number = models.CharField(max_length=15) # X(XXX) XXX-XXXX
birthday = models.DateField()
picture_privacy = models.BooleanField(default=False)
phone_privacy = models.BooleanField(default=False)
birthday_privacy = models.BooleanField(default=False)
name_privacy = models.BooleanField(default=False)
email_privacy = models.BooleanField(default=False)
|
1f03af4a3ceda754dc0196c49f295fc683bd6e5a
|
opps/core/cache/models.py
|
opps/core/cache/models.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
class ModelCaching(models.Model):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
Create MetaCaching, ModelBase for core cache
|
Create MetaCaching, ModelBase for core cache
|
Python
|
mit
|
YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
class ModelCaching(models.Model):
pass
Create MetaCaching, ModelBase for core cache
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
class ModelCaching(models.Model):
pass
<commit_msg>Create MetaCaching, ModelBase for core cache<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
class ModelCaching(models.Model):
pass
Create MetaCaching, ModelBase for core cache#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
class ModelCaching(models.Model):
pass
<commit_msg>Create MetaCaching, ModelBase for core cache<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.db import models
from django.core.cache import cache
from .managers import CacheManager
ModelBase = type(models.Model)
class MetaCaching(ModelBase):
def __new__(*args, **kwargs):
new_class = ModelBase.__new__(*args, **kwargs)
new_manager = CacheManager()
new_manager.contribute_to_class(new_class, 'objects')
new_class._default_manager = new_manager
return new_class
|
8b6d10e8339510bbc745a3167fd1d5a60422b370
|
tests/test_planner.py
|
tests/test_planner.py
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
Add tests for planner init
|
Add tests for planner init
|
Python
|
mit
|
alanc10n/py-cutplanner
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
Add tests for planner init
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for planner init<commit_after>
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
Add tests for planner initimport cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for planner init<commit_after>import cutplanner
import unittest
class TestPlanner(unittest.TestCase):
def setUp(self):
sizes = [50, 80, 120]
needed = [10, 25, 75]
loss = 0.25
self.planner = cutplanner.Planner(sizes, needed, loss)
def test_init_pieces(self):
self.assertEqual(len(self.planner.pieces_needed), 3)
self.assertEqual(self.planner.pieces_needed[0].length, 75)
def test_init_stock(self):
self.assertEqual(len(self.planner.stock_sizes), 3)
self.assertEqual(self.planner.stock_sizes, [50, 80, 120])
def test_largest_stock(self):
largest = self.planner.largest_stock
self.assertEqual(largest, 120)
def test_finalize(self):
self.planner.cur_stock = cutplanner.Stock(self.planner.largest_stock)
self.planner.cut_piece(cutplanner.Piece(1, 60))
self.planner.finalize_stock()
self.assertEqual(len(self.planner.stock), 1)
self.assertEqual(self.planner.stock[0].length, 80)
if __name__ == '__main__':
unittest.main()
|
88245d2cd66c75e1096eec53882a2750826f03be
|
zerver/migrations/0108_fix_default_string_id.py
|
zerver/migrations/0108_fix_default_string_id.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.filter(deactivated=False).count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.filter(deactivated=False).exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
Fix deactivated realm corner cases with 0108.
|
migrations: Fix deactivated realm corner cases with 0108.
Previously the default-string-id migration would not correctly handle
ignoring deactivated realms.
|
Python
|
apache-2.0
|
hackerkid/zulip,dhcrzf/zulip,tommyip/zulip,jackrzhang/zulip,kou/zulip,brockwhittaker/zulip,hackerkid/zulip,brainwane/zulip,andersk/zulip,mahim97/zulip,brainwane/zulip,punchagan/zulip,rht/zulip,timabbott/zulip,mahim97/zulip,punchagan/zulip,synicalsyntax/zulip,brainwane/zulip,synicalsyntax/zulip,brockwhittaker/zulip,rht/zulip,brockwhittaker/zulip,rishig/zulip,jackrzhang/zulip,eeshangarg/zulip,showell/zulip,mahim97/zulip,timabbott/zulip,zulip/zulip,jackrzhang/zulip,synicalsyntax/zulip,rht/zulip,kou/zulip,timabbott/zulip,dhcrzf/zulip,jackrzhang/zulip,punchagan/zulip,timabbott/zulip,showell/zulip,tommyip/zulip,dhcrzf/zulip,jackrzhang/zulip,andersk/zulip,zulip/zulip,Galexrt/zulip,eeshangarg/zulip,punchagan/zulip,tommyip/zulip,hackerkid/zulip,zulip/zulip,brockwhittaker/zulip,Galexrt/zulip,synicalsyntax/zulip,andersk/zulip,rishig/zulip,Galexrt/zulip,timabbott/zulip,Galexrt/zulip,zulip/zulip,dhcrzf/zulip,zulip/zulip,jackrzhang/zulip,mahim97/zulip,Galexrt/zulip,tommyip/zulip,shubhamdhama/zulip,eeshangarg/zulip,hackerkid/zulip,eeshangarg/zulip,eeshangarg/zulip,mahim97/zulip,showell/zulip,punchagan/zulip,zulip/zulip,eeshangarg/zulip,tommyip/zulip,Galexrt/zulip,hackerkid/zulip,rht/zulip,rishig/zulip,zulip/zulip,shubhamdhama/zulip,kou/zulip,kou/zulip,hackerkid/zulip,shubhamdhama/zulip,brainwane/zulip,andersk/zulip,dhcrzf/zulip,showell/zulip,tommyip/zulip,jackrzhang/zulip,brockwhittaker/zulip,brockwhittaker/zulip,rishig/zulip,andersk/zulip,rishig/zulip,dhcrzf/zulip,shubhamdhama/zulip,punchagan/zulip,shubhamdhama/zulip,eeshangarg/zulip,tommyip/zulip,andersk/zulip,hackerkid/zulip,rht/zulip,rht/zulip,showell/zulip,kou/zulip,brainwane/zulip,kou/zulip,showell/zulip,dhcrzf/zulip,rht/zulip,synicalsyntax/zulip,punchagan/zulip,showell/zulip,kou/zulip,timabbott/zulip,rishig/zulip,mahim97/zulip,Galexrt/zulip,synicalsyntax/zulip,shubhamdhama/zulip,timabbott/zulip,andersk/zulip,shubhamdhama/zulip,brainwane/zulip,synicalsyntax/zulip,brainwane/zulip,rishig/zulip
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
migrations: Fix deactivated realm corner cases with 0108.
Previously the default-string-id migration would not correctly handle
ignoring deactivated realms.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.filter(deactivated=False).count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.filter(deactivated=False).exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
<commit_msg>migrations: Fix deactivated realm corner cases with 0108.
Previously the default-string-id migration would not correctly handle
ignoring deactivated realms.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.filter(deactivated=False).count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.filter(deactivated=False).exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
migrations: Fix deactivated realm corner cases with 0108.
Previously the default-string-id migration would not correctly handle
ignoring deactivated realms.# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.filter(deactivated=False).count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.filter(deactivated=False).exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
<commit_msg>migrations: Fix deactivated realm corner cases with 0108.
Previously the default-string-id migration would not correctly handle
ignoring deactivated realms.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-24 02:39
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def fix_realm_string_ids(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Realm = apps.get_model('zerver', 'Realm')
if Realm.objects.filter(deactivated=False).count() != 2:
return
zulip_realm = Realm.objects.get(string_id="zulip")
try:
user_realm = Realm.objects.filter(deactivated=False).exclude(id=zulip_realm.id)[0]
except Realm.DoesNotExist:
return
user_realm.string_id = ""
user_realm.save()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0107_multiuseinvite'),
]
operations = [
migrations.RunPython(fix_realm_string_ids,
reverse_code=migrations.RunPython.noop),
]
|
464fc1e9a905df25b12975422d5b48cf8286306c
|
custom/icds_reports/utils/migrations.py
|
custom/icds_reports/utils/migrations.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
Add aww_incentive report view to migration util
|
Add aww_incentive report view to migration util
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
Add aww_incentive report view to migration util
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
<commit_msg>Add aww_incentive report view to migration util<commit_after>
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
Add aww_incentive report view to migration utilfrom __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
<commit_msg>Add aww_incentive report view to migration util<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.sql_db.operations import RawSQLMigration
def get_view_migrations():
sql_views = [
'awc_location_months.sql',
'agg_awc_monthly.sql',
'agg_ccs_record_monthly.sql',
'agg_child_health_monthly.sql',
'daily_attendance.sql',
'agg_awc_daily.sql',
'child_health_monthly.sql',
'disha_indicators.sql',
'ccs_record_monthly_view.sql',
'agg_ls_monthly.sql',
'service_delivery_monthly.sql',
'aww_incentive_report_monthly.sql',
]
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
operations = []
for view in sql_views:
operations.append(migrator.get_migration(view))
return operations
|
4c50bcf29dc397405b21322c6115a00c1df56559
|
indico/modules/events/views.py
|
indico/modules/events/views.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
Add view for the events modules
|
Add view for the events modules
|
Python
|
mit
|
indico/indico,ThiefMaster/indico,ThiefMaster/indico,pferreir/indico,DirkHoffmann/indico,indico/indico,indico/indico,DirkHoffmann/indico,DirkHoffmann/indico,OmeGak/indico,mvidalgarcia/indico,indico/indico,OmeGak/indico,pferreir/indico,mic4ael/indico,mic4ael/indico,ThiefMaster/indico,OmeGak/indico,pferreir/indico,OmeGak/indico,mic4ael/indico,mvidalgarcia/indico,DirkHoffmann/indico,mic4ael/indico,mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,mvidalgarcia/indico
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
Add view for the events modules
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
<commit_msg>Add view for the events modules<commit_after>
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
Add view for the events modules# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
<commit_before># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
<commit_msg>Add view for the events modules<commit_after># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from MaKaC.webinterface.pages.admins import WPAdminsBase
from MaKaC.webinterface.pages.base import WPJinjaMixin
from MaKaC.webinterface.pages.conferences import WPConferenceDefaultDisplayBase
class WPReferenceTypes(WPJinjaMixin, WPAdminsBase):
template_prefix = 'events/'
class WPEventDisplay(WPJinjaMixin, WPConferenceDefaultDisplayBase):
template_prefix = 'events/'
def _getBody(self, params):
return WPJinjaMixin._getPageContent(self, params)
def getCSSFiles(self):
return WPConferenceDefaultDisplayBase.getCSSFiles(self) + self._asset_env['event_display_sass'].urls()
|
2dda63a9a71764c5f4b5e6d15372dd2eb296ef4b
|
nflpool/services/activeplayers_service.py
|
nflpool/services/activeplayers_service.py
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
# TODO Update season='2017' below to a variable
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
Remove TODO - complete as of last commit
|
Remove TODO - complete as of last commit
Season object is passed to the table update
|
Python
|
mit
|
prcutler/nflpool,prcutler/nflpool
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
# TODO Update season='2017' below to a variable
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
Remove TODO - complete as of last commit
Season object is passed to the table update
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
<commit_before>import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
# TODO Update season='2017' below to a variable
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
<commit_msg>Remove TODO - complete as of last commit
Season object is passed to the table update<commit_after>
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
# TODO Update season='2017' below to a variable
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
Remove TODO - complete as of last commit
Season object is passed to the table updateimport requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
<commit_before>import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
# TODO Update season='2017' below to a variable
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
<commit_msg>Remove TODO - complete as of last commit
Season object is passed to the table update<commit_after>import requests
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.activeplayers import ActiveNFLPlayers
import nflpool.data.secret as secret
from requests.auth import HTTPBasicAuth
from nflpool.data.seasoninfo import SeasonInfo
class ActivePlayersService:
@classmethod
def add_active_nflplayers(cls, season: int, team_id: int, firstname: str, lastname: str,
position: str, player_id: int):
session = DbSessionFactory.create_session()
season_row = session.query(SeasonInfo).filter(SeasonInfo.id == '1').first()
season = season_row.current_season
response = requests.get('https://api.mysportsfeeds.com/v1.1/pull/nfl/' + season +
'-regular/active_players.json',
auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))
player_info = response.json()
player_list = player_info["activeplayers"]["playerentry"]
for players in player_list:
try:
firstname = (players["player"]["FirstName"])
lastname = (players["player"]["LastName"])
player_id = (players["player"]["ID"])
team_id = (players["team"]["ID"])
position = (players["player"]["Position"])
except KeyError:
continue
active_players = ActiveNFLPlayers(firstname=firstname, lastname=lastname, player_id=player_id,
team_id=team_id, position=position, season=season)
session.add(active_players)
session.commit()
|
7e97ddd43b8b388091e62f29d8a31875d8637d71
|
tinman/__init__.py
|
tinman/__init__.py
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5"
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
Fix a small release bug
|
Fix a small release bug
|
Python
|
bsd-3-clause
|
lucius-feng/tinman,gmr/tinman,lucius-feng/tinman,gmr/tinman,lucius-feng/tinman
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
Fix a small release bug
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5"
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
<commit_before>#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
<commit_msg>Fix a small release bug<commit_after>
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5"
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
Fix a small release bug#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5"
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
<commit_before>#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
<commit_msg>Fix a small release bug<commit_after>#!/usr/bin/env python
"""
Core Tinman imports
"""
__author__ = 'Gavin M. Roy'
__email__ = '<gmr@myyearbook.com>'
__since__ = '2011-03-14'
__version__ = "0.2.5"
__all__ = ['tinman.application',
'tinman.cache',
'tinman.cli',
'tinman.clients',
'tinman.utils',
'tinman.whitelist']
def main(*args):
# Import our CLI parser
from tinman import cli
# Run the main routine
process = cli.TinmanProcess()
process.run()
if __name__ == "__main__":
# Run our CLI process
main()
|
1b68bd3c5cb81f06ccc4dcf69baeafca1104ed37
|
nirikshak/workers/files/ini.py
|
nirikshak/workers/files/ini.py
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
try:
import configparser
except ImportError:
import ConfigParser as configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
Support for python3.5 has been added for configparser
|
Support for python3.5 has been added for configparser
|
Python
|
apache-2.0
|
thenakliman/nirikshak,thenakliman/nirikshak
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
Support for python3.5 has been added for configparser
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
try:
import configparser
except ImportError:
import ConfigParser as configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
<commit_before># Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
<commit_msg>Support for python3.5 has been added for configparser<commit_after>
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
try:
import configparser
except ImportError:
import ConfigParser as configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
Support for python3.5 has been added for configparser# Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
try:
import configparser
except ImportError:
import ConfigParser as configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
<commit_before># Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
<commit_msg>Support for python3.5 has been added for configparser<commit_after># Copyright 2017 <thenakliman@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
try:
import configparser
except ImportError:
import ConfigParser as configparser
from nirikshak.common import plugins
from nirikshak.workers import base
LOG = logging.getLogger(__name__)
@plugins.register('ini')
class INIConfigValidatorWorker(base.Worker):
@base.match_expected_output
@base.validate(required=('file', 'section', 'key'))
def work(self, **kwargs):
k = kwargs['input']['args']
config = configparser.ConfigParser()
config.read(k['file'])
value = None
try:
value = config.get(k['section'], k['key'])
LOG.info("%s configuration option found in %s section",
k['section'], k['key'])
except Exception:
LOG.error("Not able to find %s configuration parameter in %s "
"section", k['key'], k['section'], exc_info=True)
return value
|
7a1056e7c929b07220fdefb45e282104ee192836
|
github3/__init__.py
|
github3/__init__.py
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
from .legacy import LegacyUser, LegacyRepo, LegacyIssue
from .org import Organization, Team
from .pulls import PullRequest
from .repo import Repository, Branch
from .user import User
|
Add more objects to the default namespace.
|
Add more objects to the default namespace.
Ease of testing, I'm not exactly a fan of polluting it though. Might rework
this later.
|
Python
|
bsd-3-clause
|
wbrefvem/github3.py,itsmemattchung/github3.py,christophelec/github3.py,sigmavirus24/github3.py,icio/github3.py,h4ck3rm1k3/github3.py,krxsky/github3.py,jim-minter/github3.py,ueg1990/github3.py,agamdua/github3.py,degustaf/github3.py,balloob/github3.py
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
Add more objects to the default namespace.
Ease of testing, I'm not exactly a fan of polluting it though. Might rework
this later.
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
from .legacy import LegacyUser, LegacyRepo, LegacyIssue
from .org import Organization, Team
from .pulls import PullRequest
from .repo import Repository, Branch
from .user import User
|
<commit_before>"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
<commit_msg>Add more objects to the default namespace.
Ease of testing, I'm not exactly a fan of polluting it though. Might rework
this later.<commit_after>
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
from .legacy import LegacyUser, LegacyRepo, LegacyIssue
from .org import Organization, Team
from .pulls import PullRequest
from .repo import Repository, Branch
from .user import User
|
"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
Add more objects to the default namespace.
Ease of testing, I'm not exactly a fan of polluting it though. Might rework
this later."""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
from .legacy import LegacyUser, LegacyRepo, LegacyIssue
from .org import Organization, Team
from .pulls import PullRequest
from .repo import Repository, Branch
from .user import User
|
<commit_before>"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
<commit_msg>Add more objects to the default namespace.
Ease of testing, I'm not exactly a fan of polluting it though. Might rework
this later.<commit_after>"""
github3
=======
See http://github3py.rtfd.org/ for documentation.
:copyright: (c) 2012 by Ian Cordasco
:license: Modified BSD, see LICENSE for more details
"""
__title__ = 'github3'
__author__ = 'Ian Cordasco'
__license__ = 'Modified BSD'
__copyright__ = 'Copyright 2012 Ian Cordasco'
__version__ = '0.1a3'
from .api import *
from .github import GitHub
from .models import GitHubError
from .event import Event
from .gist import Gist, GistComment, GistFile
from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash
from .issue import Issue, IssueComment, IssueEvent, Label, Milestone
from .legacy import LegacyUser, LegacyRepo, LegacyIssue
from .org import Organization, Team
from .pulls import PullRequest
from .repo import Repository, Branch
from .user import User
|
689980daec94683557113163d0b7384c33904bbf
|
app/aandete/model/model.py
|
app/aandete/model/model.py
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
@classmethod
def get_by_id(cls, id):
id = int(id)
key = db.Key.from_path('Recipe', id)
return db.Model.get(key)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
Allow string IDs in get_by_id.
|
Allow string IDs in get_by_id.
|
Python
|
bsd-3-clause
|
stefanv/aandete,stefanv/aandete
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
Allow string IDs in get_by_id.
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
@classmethod
def get_by_id(cls, id):
id = int(id)
key = db.Key.from_path('Recipe', id)
return db.Model.get(key)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
<commit_before>from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
<commit_msg>Allow string IDs in get_by_id.<commit_after>
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
@classmethod
def get_by_id(cls, id):
id = int(id)
key = db.Key.from_path('Recipe', id)
return db.Model.get(key)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
Allow string IDs in get_by_id.from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
@classmethod
def get_by_id(cls, id):
id = int(id)
key = db.Key.from_path('Recipe', id)
return db.Model.get(key)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
<commit_before>from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
<commit_msg>Allow string IDs in get_by_id.<commit_after>from google.appengine.ext import db
class Recipe(db.Model):
title = db.StringProperty(required=True)
text = db.TextProperty()
ingredients = db.TextProperty()
tags = db.StringListProperty()
photo = db.BlobProperty()
owner = db.UserProperty(auto_current_user_add=True, required=True)
@classmethod
def get_by_id(cls, id):
id = int(id)
key = db.Key.from_path('Recipe', id)
return db.Model.get(key)
class Cookbook(db.Model):
title = db.StringProperty(required=True)
desc = db.Text()
recipes = db.ListProperty(int) # list of recipe keys
shared_with = db.ListProperty(db.Email)
owner = db.UserProperty(auto_current_user_add=True, required=True)
|
932e3d8b00768b1b3c103d3d44f714db5bb3a3e6
|
examples/field_example.py
|
examples/field_example.py
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
Fix flake8 issues in field example
|
Fix flake8 issues in field example
|
Python
|
mit
|
Globegitter/graphene,graphql-python/graphene,Globegitter/graphene,sjhewitt/graphene,graphql-python/graphene,sjhewitt/graphene
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
Fix flake8 issues in field example
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
<commit_before>import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
<commit_msg>Fix flake8 issues in field example<commit_after>
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
Fix flake8 issues in field exampleimport graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
<commit_before>import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
<commit_msg>Fix flake8 issues in field example<commit_after>import graphene
class Person(graphene.Interface):
name = graphene.String()
age = graphene.ID()
class Patron(Person):
id = graphene.ID()
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(self, args, info):
return Patron(id=1, name='Demo')
schema = graphene.Schema(query=Query)
query = '''
query something{
patron {
id
name
}
}
'''
result = schema.execute(query)
# Print the result
print result.data['patron']
|
d60c7e9471c442112d66a9b15838bd22fdd76600
|
libraries/encryption.py
|
libraries/encryption.py
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password[:-8]
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
Exclude the salt from the returned decrypted password
|
Exclude the salt from the returned decrypted password
|
Python
|
agpl-3.0
|
dastergon/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,gentoo/identity.gentoo.org
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
Exclude the salt from the returned decrypted password
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password[:-8]
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
<commit_before>from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
<commit_msg>Exclude the salt from the returned decrypted password<commit_after>
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password[:-8]
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
Exclude the salt from the returned decrypted passwordfrom Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password[:-8]
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
<commit_before>from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
<commit_msg>Exclude the salt from the returned decrypted password<commit_after>from Crypto.Cipher import Blowfish
from django.conf import settings
from random import choice
from base64 import encodestring as encode
from base64 import decodestring as decode
import base64
import hashlib
import string
import os
def sha_password(password):
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + encode(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
return base64.b64encode(obj.encrypt(password + settings.SECRET_KEY[:8]))
def decrypt_password(password):
obj = Blowfish.new(settings.BLOWFISH_KEY)
original_password = obj.decrypt(base64.b64decode(password + settings.SECRET_KEY[:8]))
return original_password[:-8]
def random_string(length, type = None):
if type == 'password':
chars = string.printable[:-6]
else:
chars = string.letters + string.digits
return ''.join([choice(chars) for i in range(length)])
|
a047fe167a598adfccc70268a01829b8bcdb11e1
|
python/test/clienttest.py
|
python/test/clienttest.py
|
import molequeue
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
|
import unittest
import molequeue
class TestClient(unittest.TestCase):
def test_submit_job_request(self):
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
if __name__ == '__main__':
unittest.main()
|
Convert test case to Python unittest.TestCase
|
Convert test case to Python unittest.TestCase
Use Python's testing framework
Change-Id: I924f329c43294bb1fbb395a7bb19bcaf06ea9385
|
Python
|
bsd-3-clause
|
OpenChemistry/molequeue,OpenChemistry/molequeue,OpenChemistry/molequeue
|
import molequeue
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)Convert test case to Python unittest.TestCase
Use Python's testing framework
Change-Id: I924f329c43294bb1fbb395a7bb19bcaf06ea9385
|
import unittest
import molequeue
class TestClient(unittest.TestCase):
def test_submit_job_request(self):
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import molequeue
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)<commit_msg>Convert test case to Python unittest.TestCase
Use Python's testing framework
Change-Id: I924f329c43294bb1fbb395a7bb19bcaf06ea9385<commit_after>
|
import unittest
import molequeue
class TestClient(unittest.TestCase):
def test_submit_job_request(self):
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
if __name__ == '__main__':
unittest.main()
|
import molequeue
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)Convert test case to Python unittest.TestCase
Use Python's testing framework
Change-Id: I924f329c43294bb1fbb395a7bb19bcaf06ea9385import unittest
import molequeue
class TestClient(unittest.TestCase):
def test_submit_job_request(self):
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import molequeue
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)<commit_msg>Convert test case to Python unittest.TestCase
Use Python's testing framework
Change-Id: I924f329c43294bb1fbb395a7bb19bcaf06ea9385<commit_after>import unittest
import molequeue
class TestClient(unittest.TestCase):
def test_submit_job_request(self):
client = molequeue.Client()
client.connect_to_server('MoleQueue')
job_request = molequeue.JobRequest()
job_request.queue = 'salix'
job_request.program = 'sleep (testing)'
client.submit_job_request(job_request)
if __name__ == '__main__':
unittest.main()
|
0942ee64b3d84d5ea818a204c16b80d4120e54f2
|
st2common/st2common/transport/__init__.py
|
st2common/st2common/transport/__init__.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import bootstrap_utils, utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'bootstrap_utils',
'utils',
'connection_retry_wrapper'
]
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'utils',
'connection_retry_wrapper'
]
|
Remove some nasty imports to avoid cyclic import issues.
|
Remove some nasty imports to avoid cyclic import issues.
|
Python
|
apache-2.0
|
Plexxi/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,Plexxi/st2,StackStorm/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,Plexxi/st2
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import bootstrap_utils, utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'bootstrap_utils',
'utils',
'connection_retry_wrapper'
]
Remove some nasty imports to avoid cyclic import issues.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'utils',
'connection_retry_wrapper'
]
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import bootstrap_utils, utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'bootstrap_utils',
'utils',
'connection_retry_wrapper'
]
<commit_msg>Remove some nasty imports to avoid cyclic import issues.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'utils',
'connection_retry_wrapper'
]
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import bootstrap_utils, utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'bootstrap_utils',
'utils',
'connection_retry_wrapper'
]
Remove some nasty imports to avoid cyclic import issues.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'utils',
'connection_retry_wrapper'
]
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import bootstrap_utils, utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'bootstrap_utils',
'utils',
'connection_retry_wrapper'
]
<commit_msg>Remove some nasty imports to avoid cyclic import issues.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.transport import liveaction, actionexecutionstate, execution, publishers, reactor
from st2common.transport import utils, connection_retry_wrapper
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = [
'liveaction',
'actionexecutionstate',
'execution',
'publishers',
'reactor',
'utils',
'connection_retry_wrapper'
]
|
82b45c3ec1344bed87ac7d572d82f43a4320492c
|
craigomatic/wsgi.py
|
craigomatic/wsgi.py
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
Integrate whitenoise with the Django application
|
Integrate whitenoise with the Django application
This allows Django to serve static files in production.
|
Python
|
mit
|
rgreinho/craigomatic,rgreinho/craigomatic,rgreinho/craigomatic,rgreinho/craigomatic
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
Integrate whitenoise with the Django application
This allows Django to serve static files in production.
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
<commit_before>"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
<commit_msg>Integrate whitenoise with the Django application
This allows Django to serve static files in production.<commit_after>
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
Integrate whitenoise with the Django application
This allows Django to serve static files in production."""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
<commit_before>"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
<commit_msg>Integrate whitenoise with the Django application
This allows Django to serve static files in production.<commit_after>"""
WSGI config for craigomatic project.
It exposes the WSGI callable as a module-level variable named ``application``.
"""
import os
from os.path import abspath, dirname
from sys import path
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "craigomatic.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
61ebf3fd3a80dc5573cb65c4250ede591d161b9e
|
pyaavso/formats/visual.py
|
pyaavso/formats/visual.py
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
fp.write("#DATE=%s\n" % date_format.upper())
|
Write date format in header.
|
Write date format in header.
|
Python
|
mit
|
zsiciarz/pyaavso
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
Write date format in header.
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
fp.write("#DATE=%s\n" % date_format.upper())
|
<commit_before>from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
<commit_msg>Write date format in header.<commit_after>
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
fp.write("#DATE=%s\n" % date_format.upper())
|
from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
Write date format in header.from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
fp.write("#DATE=%s\n" % date_format.upper())
|
<commit_before>from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
<commit_msg>Write date format in header.<commit_after>from __future__ import unicode_literals
import pyaavso
class VisualFormatWriter(object):
"""
A class responsible for writing observation data in AAVSO
`Visual File Format`_.
The API here mimics the ``csv`` module in Python standard library.
.. _`Visual File Format`: http://www.aavso.org/aavso-visual-file-format
"""
def __init__(self, fp, observer_code, delimiter=',', date_format='JD', obstype='Visual'):
"""
Creates the writer which will write observations into the file-like
object given in first parameter. The only other required parameter
is the official AAVSO-assigned observer code.
"""
self.observer_code = observer_code
self.date_format = date_format
self.obstype = obstype
fp.write('#TYPE=Visual\n')
fp.write('#OBSCODE=%s\n' % observer_code)
fp.write("#SOFTWARE=pyaavso %s\n" % pyaavso.get_version())
fp.write("#DATE=%s\n" % date_format.upper())
|
81b9a8179ef4db9857b4d133769c92c7b1972ee6
|
pysuru/tests/test_base.py
|
pysuru/tests/test_base.py
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_baseapi_conn_should_return_same_object():
api = BaseAPI(None, None)
obj1 = api.conn
obj2 = api.conn
assert obj1 is obj2
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
Add test to ensure only one conn object is created
|
Add test to ensure only one conn object is created
|
Python
|
mit
|
rcmachado/pysuru
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
Add test to ensure only one conn object is created
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_baseapi_conn_should_return_same_object():
api = BaseAPI(None, None)
obj1 = api.conn
obj2 = api.conn
assert obj1 is obj2
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
<commit_before># coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
<commit_msg>Add test to ensure only one conn object is created<commit_after>
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_baseapi_conn_should_return_same_object():
api = BaseAPI(None, None)
obj1 = api.conn
obj2 = api.conn
assert obj1 is obj2
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
Add test to ensure only one conn object is created# coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_baseapi_conn_should_return_same_object():
api = BaseAPI(None, None)
obj1 = api.conn
obj2 = api.conn
assert obj1 is obj2
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
<commit_before># coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
<commit_msg>Add test to ensure only one conn object is created<commit_after># coding: utf-8
from pysuru.base import BaseAPI, ObjectMixin
def test_baseapi_headers_should_return_authorization_header():
api = BaseAPI(None, 'TOKEN')
assert {'Authorization': 'bearer TOKEN'} == api.headers
def test_baseapi_conn_should_return_same_object():
api = BaseAPI(None, None)
obj1 = api.conn
obj2 = api.conn
assert obj1 is obj2
def test_build_url_should_return_full_api_endpoint():
api = BaseAPI('http://example.com/', None)
assert 'http://example.com/apis' == api.build_url('/apis')
api = BaseAPI('http://example.com', None)
assert 'http://example.com/apis' == api.build_url('/apis')
def test_baseobject_create_should_ignore_unknown_fields():
data = {'field1': 'value1', 'unknown': 'ignored'}
created = _DummyObject.create(**data)
assert created.attrs['field1'] == 'value1'
assert 'unknown' not in created.attrs
class _DummyObject(ObjectMixin):
_fields = ('field1', 'field2')
def __init__(self, **kwargs):
self.attrs = kwargs
|
d32710e53b89e1377a64427f934053c3b0d33802
|
bin/intake_multiprocess.py
|
bin/intake_multiprocess.py
|
import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
|
import json
import logging
# `import logging` alone does not guarantee the logging.config submodule is
# loaded; import it explicitly before calling logging.config.dictConfig.
import logging.config
import argparse

import numpy as np

import emission.pipeline.scheduler as eps

if __name__ == '__main__':
    # Prefer the deployed logging config; fall back to the checked-in sample.
    try:
        intake_log_config = json.load(open("conf/log/intake.conf", "r"))
    except:
        intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))

    parser = argparse.ArgumentParser()
    parser.add_argument("n_workers", type=int,
                        help="the number of worker processors to use")
    parser.add_argument("-p", "--public", action="store_true",
                        help="pipeline for public (as opposed to regular) phones")
    args = parser.parse_args()

    # Public and regular launchers must write to separate log files because
    # the log files are not safe to share between concurrent pipelines.
    log_tag = "intake_launcher_public" if args.public else "intake_launcher"
    for handler in ("file", "errors"):
        current = intake_log_config["handlers"][handler]["filename"]
        intake_log_config["handlers"][handler]["filename"] = current.replace("intake", log_tag)

    logging.config.dictConfig(intake_log_config)
    # Fixed seed so the UUID split is reproducible across launcher runs.
    np.random.seed(61297777)

    split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
    logging.info("Finished generating split lists %s" % split_lists)
    eps.dispatch(split_lists, args.public)
|
Use a separate log file for the public launcher data
|
Use a separate log file for the public launcher data
Log files are not thread-safe
|
Python
|
bsd-3-clause
|
sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server
|
import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
Use a separate log file for the public launcher data
Log files are not thread-safe
|
import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
if args.public:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher_public")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher_public")
else:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
|
<commit_before>import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
<commit_msg>Use a separate log file for the public launcher data
Log files are not thread-safe<commit_after>
|
import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
if args.public:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher_public")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher_public")
else:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
|
import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
Use a separate log file for the public launcher data
Log files are not thread-safeimport json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
if args.public:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher_public")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher_public")
else:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
|
<commit_before>import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
<commit_msg>Use a separate log file for the public launcher data
Log files are not thread-safe<commit_after>import json
import logging
import argparse
import numpy as np
import emission.pipeline.scheduler as eps
if __name__ == '__main__':
try:
intake_log_config = json.load(open("conf/log/intake.conf", "r"))
except:
intake_log_config = json.load(open("conf/log/intake.conf.sample", "r"))
parser = argparse.ArgumentParser()
parser.add_argument("n_workers", type=int,
help="the number of worker processors to use")
parser.add_argument("-p", "--public", action="store_true",
help="pipeline for public (as opposed to regular) phones")
args = parser.parse_args()
if args.public:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher_public")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher_public")
else:
intake_log_config["handlers"]["file"]["filename"] = intake_log_config["handlers"]["file"]["filename"].replace("intake", "intake_launcher")
intake_log_config["handlers"]["errors"]["filename"] = intake_log_config["handlers"]["errors"]["filename"].replace("intake", "intake_launcher")
logging.config.dictConfig(intake_log_config)
np.random.seed(61297777)
split_lists = eps.get_split_uuid_lists(args.n_workers, args.public)
logging.info("Finished generating split lists %s" % split_lists)
eps.dispatch(split_lists, args.public)
|
454e8203295c7f51e1d660adcaf3d282ded5652f
|
scripts/cluster/craq/start_craq_router.py
|
scripts/cluster/craq/start_craq_router.py
|
#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.30:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.30:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
import sys
import subprocess
import time


def main():
    """Launch two CRAQ routers, both pointed at the zookeeper node."""
    zookeeper = '192.168.1.7:9888'
    for port in ('10499', '10498'):
        command = ('/home/meru/bmistree/new-craq-dist/craq-router-32'
                   ' -d meru -p ' + port + ' -z ' + zookeeper)
        subprocess.Popen(command, shell=True)
    return 0


if __name__ == "__main__":
    sys.exit(main())
|
Make craq router script point at correct zookeeper node.
|
Make craq router script point at correct zookeeper node.
|
Python
|
bsd-3-clause
|
sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata,sirikata/sirikata
|
#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.30:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.30:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
Make craq router script point at correct zookeeper node.
|
#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.7:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.30:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.30:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Make craq router script point at correct zookeeper node.<commit_after>
|
#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.7:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.30:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.30:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
Make craq router script point at correct zookeeper node.#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.7:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
<commit_before>#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.30:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.30:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
<commit_msg>Make craq router script point at correct zookeeper node.<commit_after>#!/usr/bin/python
import sys
import subprocess
import time
def main():
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10499 -z 192.168.1.7:9888', shell=True)
subprocess.Popen('/home/meru/bmistree/new-craq-dist/craq-router-32 -d meru -p 10498 -z 192.168.1.7:9888', shell=True)
return 0
if __name__ == "__main__":
sys.exit(main())
|
f0371f68fc0ece594710ad9dbbdbfdab00a22e49
|
migrations/003_add_capped_collections.py
|
migrations/003_add_capped_collections.py
|
"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_realtime_pay_legalisation_post",
"fco_realtime_pay_legalisation_drop_off",
"fco_realtime_pay_register_birth_abroad",
"fco_realtime_pay_register_death_abroad",
"fco_realtime_pay_foreign_marriage_certificates",
"fco_realtime_deposit_foreign_marriage",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
|
"""
Add capped collections for real time data
"""
import logging

log = logging.getLogger(__name__)

# Real-time buckets; each one is backed by a small capped collection.
_REALTIME_COLLECTIONS = (
    "fco_pay_legalisation_post_realtime",
    "fco_pay_legalisation_drop_off_realtime",
    "fco_pay_register_birth_abroad_realtime",
    "fco_pay_register_death_abroad_realtime",
    "fco_pay_foreign_marriage_certificates_realtime",
    "fco_deposit_foreign_marriage_realtime",
    "govuk_realtime",
    "licensing_realtime",
)


def up(db):
    """Create a 5040-byte capped collection for every real-time bucket."""
    for name in _REALTIME_COLLECTIONS:
        db.create_collection(name=name, capped=True, size=5040)
        log.info("created capped collection: %s" % name)
|
Make realtim fco bucket names match format of others
|
Make realtim fco bucket names match format of others
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_realtime_pay_legalisation_post",
"fco_realtime_pay_legalisation_drop_off",
"fco_realtime_pay_register_birth_abroad",
"fco_realtime_pay_register_death_abroad",
"fco_realtime_pay_foreign_marriage_certificates",
"fco_realtime_deposit_foreign_marriage",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
Make realtim fco bucket names match format of others
|
"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_pay_legalisation_post_realtime",
"fco_pay_legalisation_drop_off_realtime",
"fco_pay_register_birth_abroad_realtime",
"fco_pay_register_death_abroad_realtime",
"fco_pay_foreign_marriage_certificates_realtime",
"fco_deposit_foreign_marriage_realtime",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
|
<commit_before>"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_realtime_pay_legalisation_post",
"fco_realtime_pay_legalisation_drop_off",
"fco_realtime_pay_register_birth_abroad",
"fco_realtime_pay_register_death_abroad",
"fco_realtime_pay_foreign_marriage_certificates",
"fco_realtime_deposit_foreign_marriage",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
<commit_msg>Make realtim fco bucket names match format of others<commit_after>
|
"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_pay_legalisation_post_realtime",
"fco_pay_legalisation_drop_off_realtime",
"fco_pay_register_birth_abroad_realtime",
"fco_pay_register_death_abroad_realtime",
"fco_pay_foreign_marriage_certificates_realtime",
"fco_deposit_foreign_marriage_realtime",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
|
"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_realtime_pay_legalisation_post",
"fco_realtime_pay_legalisation_drop_off",
"fco_realtime_pay_register_birth_abroad",
"fco_realtime_pay_register_death_abroad",
"fco_realtime_pay_foreign_marriage_certificates",
"fco_realtime_deposit_foreign_marriage",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
Make realtim fco bucket names match format of others"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_pay_legalisation_post_realtime",
"fco_pay_legalisation_drop_off_realtime",
"fco_pay_register_birth_abroad_realtime",
"fco_pay_register_death_abroad_realtime",
"fco_pay_foreign_marriage_certificates_realtime",
"fco_deposit_foreign_marriage_realtime",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
|
<commit_before>"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_realtime_pay_legalisation_post",
"fco_realtime_pay_legalisation_drop_off",
"fco_realtime_pay_register_birth_abroad",
"fco_realtime_pay_register_death_abroad",
"fco_realtime_pay_foreign_marriage_certificates",
"fco_realtime_deposit_foreign_marriage",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
<commit_msg>Make realtim fco bucket names match format of others<commit_after>"""
Add capped collections for real time data
"""
import logging
log = logging.getLogger(__name__)
def up(db):
capped_collections = [
"fco_pay_legalisation_post_realtime",
"fco_pay_legalisation_drop_off_realtime",
"fco_pay_register_birth_abroad_realtime",
"fco_pay_register_death_abroad_realtime",
"fco_pay_foreign_marriage_certificates_realtime",
"fco_deposit_foreign_marriage_realtime",
"govuk_realtime",
"licensing_realtime",
]
for collection_name in capped_collections:
db.create_collection(name=collection_name, capped=True, size=5040)
log.info("created capped collection: %s" % collection_name)
|
db7583b62aad9eaf10c67e89cd46087b36c77d81
|
scikits/image/io/setup.py
|
scikits/image/io/setup.py
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('_plugins/tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path

base_path = os.path.abspath(os.path.dirname(__file__))


def configuration(parent_package='', top_path=None):
    """Assemble the numpy.distutils configuration for the io subpackage."""
    from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs

    config = Configuration('io', parent_package, top_path)
    config.add_data_dir('tests')
    config.add_data_files('_plugins/*.ini')

    # Try to regenerate C sources from the .pyx files; if Cython is not
    # available, the checked-in .c files below are compiled as-is.
    cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
           working_path=base_path)

    for ext_name, c_source in (('_plugins._colormixer', '_plugins/_colormixer.c'),
                               ('_plugins._histograms', '_plugins/_histograms.c')):
        config.add_extension(ext_name,
                             sources=[c_source],
                             include_dirs=[get_numpy_include_dirs()])

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(maintainer='scikits.image Developers',
          maintainer_email='scikits-image@googlegroups.com',
          description='Image I/O Routines',
          url='http://stefanv.github.com/scikits.image/',
          license='Modified BSD',
          **(configuration(top_path='').todict()))
|
Move plugin tests to io/tests.
|
io: Move plugin tests to io/tests.
|
Python
|
bsd-3-clause
|
ClinicalGraphics/scikit-image,emmanuelle/scikits.image,juliusbierk/scikit-image,Hiyorimi/scikit-image,almarklein/scikit-image,newville/scikit-image,SamHames/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,youprofit/scikit-image,jwiggins/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image,chintak/scikit-image,youprofit/scikit-image,emmanuelle/scikits.image,michaelpacer/scikit-image,almarklein/scikit-image,paalge/scikit-image,bennlich/scikit-image,SamHames/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,GaZ3ll3/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,vighneshbirodkar/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,paalge/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,almarklein/scikit-image,keflavich/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,blink1073/scikit-image,vighneshbirodkar/scikit-image,GaelVaroquaux/scikits.image,Midafi/scikit-image,rjeli/scikit-image,GaZ3ll3/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,emmanuelle/scikits.image,warmspringwinds/scikit-image,chriscrosscutler/scikit-image,ofgulban/scikit-image,bsipocz/scikit-image,vighneshbirodkar/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,juliusbierk/scikit-image,emmanuelle/scikits.image,chintak/scikit-image,SamHames/scikit-image,newville/scikit-image,SamHames/scikit-image,rjeli/scikit-image,keflavich/scikit-image,bennlich/scikit-image,Britefury/scikit-image,robintw/scikit-image,rjeli/scikit-image,chintak/scikit-image,robintw/scikit-image,WarrenWeckesser/scikits-image,blink1073/scikit-image,chriscrosscutler/scikit-image,pratapvardhan/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,paalge/scikit-image,GaelVaroquaux/scikits.image,bsipocz/scikit-image,oew1v07/scikit-image,ofgulban/scikit-image,dpshelio/scikit-image
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('_plugins/tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
io: Move plugin tests to io/tests.
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
|
<commit_before>#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('_plugins/tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
<commit_msg>io: Move plugin tests to io/tests.<commit_after>
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
|
#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('_plugins/tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
io: Move plugin tests to io/tests.#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
|
<commit_before>#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('_plugins/tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
<commit_msg>io: Move plugin tests to io/tests.<commit_after>#!/usr/bin/env python
from scikits.image._build import cython
import os.path
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('io', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_files('_plugins/*.ini')
# This function tries to create C files from the given .pyx files. If
# it fails, we build the checked-in .c files.
cython(['_plugins/_colormixer.pyx', '_plugins/_histograms.pyx'],
working_path=base_path)
config.add_extension('_plugins._colormixer',
sources=['_plugins/_colormixer.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('_plugins._histograms',
sources=['_plugins/_histograms.c'],
include_dirs=[get_numpy_include_dirs()])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer = 'scikits.image Developers',
maintainer_email = 'scikits-image@googlegroups.com',
description = 'Image I/O Routines',
url = 'http://stefanv.github.com/scikits.image/',
license = 'Modified BSD',
**(configuration(top_path='').todict())
)
|
459e8ba9ecfd16276d7a623b5f6e61ac9fcedcee
|
kolibri/plugins/html5_viewer/options.py
|
kolibri/plugins/html5_viewer/options.py
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation ",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
Fix typo in sandbox attribute name.
|
Fix typo in sandbox attribute name.
|
Python
|
mit
|
indirectlylit/kolibri,mrpau/kolibri,mrpau/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation ",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
Fix typo in sandbox attribute name.
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
<commit_before>import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation ",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
<commit_msg>Fix typo in sandbox attribute name.<commit_after>
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation ",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
Fix typo in sandbox attribute name.import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
<commit_before>import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation ",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
<commit_msg>Fix typo in sandbox attribute name.<commit_after>import logging
logger = logging.getLogger(__name__)
# Source: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-sandbox
allowable_sandbox_tokens = set(
[
"allow-downloads-without-user-activation",
"allow-forms",
"allow-modals",
"allow-orientation-lock",
"allow-pointer-lock",
"allow-popups",
"allow-popups-to-escape-sandbox",
"allow-presentation",
"allow-same-origin",
"allow-scripts",
"allow-storage-access-by-user-activation",
"allow-top-navigation",
"allow-top-navigation-by-user-activation",
]
)
def clean_sandbox(sandbox_string):
"""
Clean up sandbox string to ensure it only contains valid items.
"""
sandbox_tokens = []
illegal_tokens = []
for token in sandbox_string.split(" "):
if token in allowable_sandbox_tokens:
sandbox_tokens.append(token)
else:
illegal_tokens.append(token)
if illegal_tokens:
logger.warn(
"Invalid sandbox token passed to options {}".format(
" ".join(illegal_tokens)
)
)
return " ".join(sandbox_tokens)
option_spec = {
"HTML5": {
"SANDBOX": {
"type": "string",
"default": "allow-scripts",
"envvars": ("KOLIBRI_HTML5_SANDBOX",),
"clean": clean_sandbox,
}
}
}
|
43238d0de9e4d6d4909b4d67c17449a9599e5dac
|
mygpo/web/templatetags/time.py
|
mygpo/web/templatetags/time.py
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'0h 16m 40s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'16m 40s'
>>> format_duration(10009)
'2h 46m 49s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
if hours:
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
else:
return _('{m}m {s}s').format(m=minutes, s=seconds)
|
Format short durations without "0 hours"
|
Format short durations without "0 hours"
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'0h 16m 40s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
Format short durations without "0 hours"
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'16m 40s'
>>> format_duration(10009)
'2h 46m 49s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
if hours:
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
else:
return _('{m}m {s}s').format(m=minutes, s=seconds)
|
<commit_before>from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'0h 16m 40s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
<commit_msg>Format short durations without "0 hours"<commit_after>
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'16m 40s'
>>> format_duration(10009)
'2h 46m 49s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
if hours:
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
else:
return _('{m}m {s}s').format(m=minutes, s=seconds)
|
from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'0h 16m 40s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
Format short durations without "0 hours"from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'16m 40s'
>>> format_duration(10009)
'2h 46m 49s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
if hours:
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
else:
return _('{m}m {s}s').format(m=minutes, s=seconds)
|
<commit_before>from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'0h 16m 40s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
<commit_msg>Format short durations without "0 hours"<commit_after>from datetime import time
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django import template
register = template.Library()
@register.filter
def sec_to_time(sec):
""" Converts seconds to a time object
>>> t = sec_to_time(1000)
>>> (t.hour, t.minute, t.second)
(0, 16, 40)
"""
s = int(sec)
hour = int(s / 60 / 60)
minute = int((s / 60) % 60)
sec = int(s % 60 )
return time(hour, minute, sec)
@register.filter
@mark_safe
def format_duration(sec):
""" Converts seconds into a duration string
>>> format_duration(1000)
'16m 40s'
>>> format_duration(10009)
'2h 46m 49s'
"""
hours = int(sec / 60 / 60)
minutes = int((sec / 60) % 60)
seconds = int(sec % 60)
if hours:
return _('{h}h {m}m {s}s').format(h=hours, m=minutes, s=seconds)
else:
return _('{m}m {s}s').format(m=minutes, s=seconds)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.