commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
85814828d2caedd8612db6ce0ecec92025a34330 | tests/test_main.py | tests/test_main.py | from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
| from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
| Add test for bitbucket domain | Add test for bitbucket domain
| Python | bsd-3-clause | michaeljoseph/cookiecutter,Springerle/cookiecutter,Springerle/cookiecutter,venumech/cookiecutter,cguardia/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,agconti/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,audreyr/cookiecutter,venumech/cookiecutter,takeflight/cookiecutter,dajose/cookiecutter,takeflight/cookiecutter,agconti/cookiecutter,terryjbates/cookiecutter,cguardia/cookiecutter,stevepiercy/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,luzfcb/cookiecutter,christabor/cookiecutter,ramiroluz/cookiecutter,hackebrot/cookiecutter,christabor/cookiecutter,benthomasson/cookiecutter,benthomasson/cookiecutter,michaeljoseph/cookiecutter,ramiroluz/cookiecutter,moi65/cookiecutter,stevepiercy/cookiecutter,dajose/cookiecutter,terryjbates/cookiecutter,moi65/cookiecutter,willingc/cookiecutter | from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
Add test for bitbucket domain | from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
| <commit_before>from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
<commit_msg>Add test for bitbucket domain<commit_after> | from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
| from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
Add test for bitbucket domainfrom cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
| <commit_before>from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
<commit_msg>Add test for bitbucket domain<commit_after>from cookiecutter.main import is_repo_url
def test_is_repo_url():
"""Verify is_repo_url works."""
assert is_repo_url('gitolite@server:team/repo') is True
assert is_repo_url('git@github.com:audreyr/cookiecutter.git') is True
assert is_repo_url('https://github.com/audreyr/cookiecutter.git') is True
assert is_repo_url('gh:audreyr/cookiecutter-pypackage') is True
assert is_repo_url('https://bitbucket.org/pokoli/cookiecutter.hg') is True
assert is_repo_url('/audreyr/cookiecutter.git') is False
assert is_repo_url('/home/audreyr/cookiecutter') is False
|
0de3f3380eda3ed541fbf37243e13243a5ad6e1e | tests/test_open.py | tests/test_open.py | #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
| #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
| Add unit test for opening bible reference urls | Add unit test for opening bible reference urls
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
Add unit test for opening bible reference urls | #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
| <commit_before>#!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
<commit_msg>Add unit test for opening bible reference urls<commit_after> | #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
| #!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
Add unit test for opening bible reference urls#!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
| <commit_before>#!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
<commit_msg>Add unit test for opening bible reference urls<commit_after>#!/usr/bin/env python
import unittest
import yv_suggest.open as yvs
import inspect
class WebbrowserMock(object):
'''mock the builtin webbrowser module'''
def open(self, url):
'''mock the webbrowser.open() function'''
self.url = url
class OpenTestCase(unittest.TestCase):
'''test the handling of Bible reference URLs'''
def test_url(self):
'''should build correct URL to Bible reference'''
url = yvs.get_ref_url('esv/jhn.3.16')
self.assertEqual(url, 'https://www.bible.com/bible/esv/jhn.3.16')
def test_query_param(self):
'''should use received query parameter as default ref ID'''
spec = inspect.getargspec(yvs.main)
default_query_str = spec.defaults[0]
self.assertEqual(default_query_str, '{query}')
def test_url_open(self):
'''should attempt to open URL using webbrowser module'''
mock = self.WebbrowserMock()
yvs.webbrowser = mock
yvs.main('nlt/jhn.3.17')
self.assertEqual(mock.url, 'https://www.bible.com/bible/nlt/jhn.3.17')
|
b676e0ba5ab1f37147cdf2ff28223fc57f37f567 | models/log_entry.py | models/log_entry.py |
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(1000), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
| Reduce the size of log_name so it fits within mysql's limit. | Reduce the size of log_name so it fits within mysql's limit.
| Python | agpl-3.0 | izrik/sawmill,izrik/sawmill,izrik/sawmill |
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(1000), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
Reduce the size of log_name so it fits within mysql's limit. |
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
| <commit_before>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(1000), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
<commit_msg>Reduce the size of log_name so it fits within mysql's limit.<commit_after> |
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(1000), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
Reduce the size of log_name so it fits within mysql's limit.
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
| <commit_before>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(1000), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
<commit_msg>Reduce the size of log_name so it fits within mysql's limit.<commit_after>
from database import db
from conversions import datetime_from_str
class LogEntry(db.Model):
id = db.Column(db.Integer, primary_key=True)
timestamp = db.Column(db.DateTime, index=True)
server = db.Column(db.String(100), index=True)
log_name = db.Column(db.String(760), index=True)
message = db.Column(db.Text(), nullable=True)
def __init__(self, timestamp, server, log_name, message):
self.timestamp = datetime_from_str(timestamp)
self.server = server
self.log_name = log_name
self.message = message
def to_dict(self):
return {
'timestamp': self.timestamp,
'server': self.server,
'log_name': self.log_name,
'message': self.message,
}
|
006f957d8b6d747ad701d7b39a411df8f562f17f | modules/karmamod.py | modules/karmamod.py | """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
except KeyError, TypeError:
result = 0
self.msg(event['target'], '%s has %d karma.' % (event['args'][0], result))
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
| """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
self.reply('%s has %d karma.' % (event['args'][0], result))
except KeyError, TypeError:
result = 0
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
| Change to reply only if target has karma | Change to reply only if target has karma
| Python | mit | billyvg/piebot | """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
except KeyError, TypeError:
result = 0
self.msg(event['target'], '%s has %d karma.' % (event['args'][0], result))
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
Change to reply only if target has karma | """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
self.reply('%s has %d karma.' % (event['args'][0], result))
except KeyError, TypeError:
result = 0
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
| <commit_before>"""Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
except KeyError, TypeError:
result = 0
self.msg(event['target'], '%s has %d karma.' % (event['args'][0], result))
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
<commit_msg>Change to reply only if target has karma<commit_after> | """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
self.reply('%s has %d karma.' % (event['args'][0], result))
except KeyError, TypeError:
result = 0
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
| """Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
except KeyError, TypeError:
result = 0
self.msg(event['target'], '%s has %d karma.' % (event['args'][0], result))
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
Change to reply only if target has karma"""Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
self.reply('%s has %d karma.' % (event['args'][0], result))
except KeyError, TypeError:
result = 0
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
| <commit_before>"""Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
except KeyError, TypeError:
result = 0
self.msg(event['target'], '%s has %d karma.' % (event['args'][0], result))
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
<commit_msg>Change to reply only if target has karma<commit_after>"""Keeps track of karma counts.
@package ppbot
@syntax .karma <item>
"""
import re
from modules import *
class Karmamod(Module):
def __init__(self, *args, **kwargs):
"""Constructor"""
Module.__init__(self, kwargs=kwargs)
def _register_events(self):
self.add_command('karma', 'get_karma')
self.add_event('pubmsg', 'parsekarma')
@op
def get_karma(self, event):
karma = self.db.karma.find_one({'name': event['args'][0].lower(),
'source': event['target']})
try:
result = karma['count']
self.reply('%s has %d karma.' % (event['args'][0], result))
except KeyError, TypeError:
result = 0
def parsekarma(self, event):
inc_pattern = re.compile('([^ ]{2,})\+\+')
m = inc_pattern.findall(event['message'])
for term in m:
self.change(event, term, 1)
dec_pattern = re.compile('([^ ]{2,})--')
m = dec_pattern.findall(event['message'])
for term in m:
self.change(event, term, -1)
def change(self, event, name, value):
"""Change karma count."""
self.db.karma.update({
'name': name.lower(),
'source': event['target']
}, {
'$inc': {
'count': 1
}
}, True)
|
a4d1659197c0c3da706065d5362fd3b060223c87 | newaccount/views.py | newaccount/views.py | from django.shortcuts import render
from django.http import JsonResponse
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
return JsonResponse({"status": "error", "message": "Test Case"})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
| from django.http import JsonResponse
from django.contrib.auth.models import User
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
import urllib
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
msg = ''
if not('username' in request.GET):
msg = 'Name not given!'
elif not('email' in request.GET):
msg = 'Email not given!'
name = urllib.unquote(request.GET['username'])
email = urllib.unquote(request.GET['email'])
if not msg: msg = __validate_name__(name)
if not msg: msg = __validate_email__(email)
status = 'error' if msg else 'success'
return JsonResponse({"status": status, "message": msg})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
def __validate_name__(name):
''' Internal validation function for username '''
lname = len(name)
if lname < 5:
return 'User name must be at least 5 characters long'
if lname > 64:
return 'User name must not be longer than 64 characters'
if len(User.objects.filter(username=name)):
return 'User name already in use'
return ''
def __validate_email__(email):
''' Internal validation function for email '''
try:
validate_email(email)
except ValidationError:
return 'Invalid email address: '+email
return ''
| Implement backend newaccount form verification | Implement backend newaccount form verification
| Python | mit | NicolasKiely/Robit-Tracker,NicolasKiely/Robit-Tracker,NicolasKiely/Robit-Tracker | from django.shortcuts import render
from django.http import JsonResponse
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
return JsonResponse({"status": "error", "message": "Test Case"})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
Implement backend newaccount form verification | from django.http import JsonResponse
from django.contrib.auth.models import User
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
import urllib
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
msg = ''
if not('username' in request.GET):
msg = 'Name not given!'
elif not('email' in request.GET):
msg = 'Email not given!'
name = urllib.unquote(request.GET['username'])
email = urllib.unquote(request.GET['email'])
if not msg: msg = __validate_name__(name)
if not msg: msg = __validate_email__(email)
status = 'error' if msg else 'success'
return JsonResponse({"status": status, "message": msg})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
def __validate_name__(name):
''' Internal validation function for username '''
lname = len(name)
if lname < 5:
return 'User name must be at least 5 characters long'
if lname > 64:
return 'User name must not be longer than 64 characters'
if len(User.objects.filter(username=name)):
return 'User name already in use'
return ''
def __validate_email__(email):
''' Internal validation function for email '''
try:
validate_email(email)
except ValidationError:
return 'Invalid email address: '+email
return ''
| <commit_before>from django.shortcuts import render
from django.http import JsonResponse
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
return JsonResponse({"status": "error", "message": "Test Case"})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
<commit_msg>Implement backend newaccount form verification<commit_after> | from django.http import JsonResponse
from django.contrib.auth.models import User
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
import urllib
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
msg = ''
if not('username' in request.GET):
msg = 'Name not given!'
elif not('email' in request.GET):
msg = 'Email not given!'
name = urllib.unquote(request.GET['username'])
email = urllib.unquote(request.GET['email'])
if not msg: msg = __validate_name__(name)
if not msg: msg = __validate_email__(email)
status = 'error' if msg else 'success'
return JsonResponse({"status": status, "message": msg})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
def __validate_name__(name):
''' Internal validation function for username '''
lname = len(name)
if lname < 5:
return 'User name must be at least 5 characters long'
if lname > 64:
return 'User name must not be longer than 64 characters'
if len(User.objects.filter(username=name)):
return 'User name already in use'
return ''
def __validate_email__(email):
''' Internal validation function for email '''
try:
validate_email(email)
except ValidationError:
return 'Invalid email address: '+email
return ''
| from django.shortcuts import render
from django.http import JsonResponse
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
return JsonResponse({"status": "error", "message": "Test Case"})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
Implement backend newaccount form verificationfrom django.http import JsonResponse
from django.contrib.auth.models import User
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
import urllib
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
msg = ''
if not('username' in request.GET):
msg = 'Name not given!'
elif not('email' in request.GET):
msg = 'Email not given!'
name = urllib.unquote(request.GET['username'])
email = urllib.unquote(request.GET['email'])
if not msg: msg = __validate_name__(name)
if not msg: msg = __validate_email__(email)
status = 'error' if msg else 'success'
return JsonResponse({"status": status, "message": msg})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
def __validate_name__(name):
''' Internal validation function for username '''
lname = len(name)
if lname < 5:
return 'User name must be at least 5 characters long'
if lname > 64:
return 'User name must not be longer than 64 characters'
if len(User.objects.filter(username=name)):
return 'User name already in use'
return ''
def __validate_email__(email):
''' Internal validation function for email '''
try:
validate_email(email)
except ValidationError:
return 'Invalid email address: '+email
return ''
| <commit_before>from django.shortcuts import render
from django.http import JsonResponse
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
return JsonResponse({"status": "error", "message": "Test Case"})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
<commit_msg>Implement backend newaccount form verification<commit_after>from django.http import JsonResponse
from django.contrib.auth.models import User
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.shortcuts import render
import urllib
import common.render
from common.settings import get_page_config
def form(request):
''' The signup form webpage '''
context = get_page_config(title='New User Sign Up')
context['form'] = [
{'label': 'User Name', 'name': 'username'},
{'label': 'Email Address', 'name': 'email'},
{'label': 'Password', 'type': 'password', 'name':'password'},
{'label': 'Re-enter Password', 'type': 'password', 'id':'repass'}
]
context['validators'] = [ 'signup/newaccount_validator.js' ]
return common.render.singleform(request, context)
def validate(request):
''' Signup form validation handler '''
msg = ''
if not('username' in request.GET):
msg = 'Name not given!'
elif not('email' in request.GET):
msg = 'Email not given!'
name = urllib.unquote(request.GET['username'])
email = urllib.unquote(request.GET['email'])
if not msg: msg = __validate_name__(name)
if not msg: msg = __validate_email__(email)
status = 'error' if msg else 'success'
return JsonResponse({"status": status, "message": msg})
def submit(request):
''' Signup form submission handler '''
return HttpResponse('')
def __validate_name__(name):
''' Internal validation function for username '''
lname = len(name)
if lname < 5:
return 'User name must be at least 5 characters long'
if lname > 64:
return 'User name must not be longer than 64 characters'
if len(User.objects.filter(username=name)):
return 'User name already in use'
return ''
def __validate_email__(email):
''' Internal validation function for email '''
try:
validate_email(email)
except ValidationError:
return 'Invalid email address: '+email
return ''
|
1e562decdc03295dec4cb37d26162e5d9aa31079 | neutron/tests/common/agents/l3_agent.py | neutron/tests/common/agents/l3_agent.py | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
| # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def __init__(self, host, conf=None):
super(TestL3NATAgent, self).__init__(host, conf)
self.event_observers.observers = set(
observer.__class__(self) for observer in
self.event_observers.observers)
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
| Update L3 agent drivers singletons to look at new agent | Update L3 agent drivers singletons to look at new agent
L3 agent drivers are singletons. They're created once, and hold
self.l3_agent. During testing, the agent is tossed away and
re-built, but the drivers singletons are pointing at the old
agent, and its old configuration.
Change-Id: Ie8a15318e71ea47cccad3b788751d914d51cbf18
Closes-Bug: #1404662
| Python | apache-2.0 | JianyuWang/neutron,SmartInfrastructures/neutron,eayunstack/neutron,skyddv/neutron,MaximNevrov/neutron,watonyweng/neutron,gkotton/neutron,openstack/neutron,mandeepdhami/neutron,glove747/liberty-neutron,projectcalico/calico-neutron,SamYaple/neutron,takeshineshiro/neutron,dims/neutron,watonyweng/neutron,miyakz1192/neutron,mandeepdhami/neutron,asgard-lab/neutron,mmnelemane/neutron,magic0704/neutron,bgxavier/neutron,mahak/neutron,gkotton/neutron,wenhuizhang/neutron,sasukeh/neutron,blueboxgroup/neutron,adelina-t/neutron,dhanunjaya/neutron,swdream/neutron,shahbazn/neutron,sebrandon1/neutron,Metaswitch/calico-neutron,vivekanand1101/neutron,swdream/neutron,antonioUnina/neutron,noironetworks/neutron,cisco-openstack/neutron,blueboxgroup/neutron,cloudbase/neutron-virtualbox,mmnelemane/neutron,huntxu/neutron,openstack/neutron,skyddv/neutron,eayunstack/neutron,NeCTAR-RC/neutron,projectcalico/calico-neutron,antonioUnina/neutron,apporc/neutron,SmartInfrastructures/neutron,alexandrucoman/vbox-neutron-agent,miyakz1192/neutron,suneeth51/neutron,JioCloud/neutron,barnsnake351/neutron,takeshineshiro/neutron,gkotton/neutron,mahak/neutron,noironetworks/neutron,cloudbase/neutron,mattt416/neutron,cisco-openstack/neutron,mahak/neutron,asgard-lab/neutron,sasukeh/neutron,glove747/liberty-neutron,klmitch/neutron,igor-toga/local-snat,paninetworks/neutron,Stavitsky/neutron,silenci/neutron,bigswitch/neutron,infobloxopen/neutron,dhanunjaya/neutron,vveerava/Openstack,vveerava/Openstack,adelina-t/neutron,shahbazn/neutron,MaximNevrov/neutron,cloudbase/neutron,pnavarro/neutron,bgxavier/neutron,alexandrucoman/vbox-neutron-agent,eonpatapon/neutron,magic0704/neutron,yuewko/neutron,blueboxgroup/neutron,wolverineav/neutron,chitr/neutron,wenhuizhang/neutron,jerryz1982/neutron,eonpatapon/neutron,barnsnake351/neutron,SamYaple/neutron,vivekanand1101/neutron,jumpojoy/neutron,huntxu/neutron,JianyuWang/neutron,paninetworks/neutron,waltBB/neutron_read,sebrandon1/neutron,JioCloud/neutron,bigsw
itch/neutron,javaos74/neutron,igor-toga/local-snat,jacknjzhou/neutron,infobloxopen/neutron,neoareslinux/neutron,yuewko/neutron,Stavitsky/neutron,wolverineav/neutron,aristanetworks/neutron,openstack/neutron,javaos74/neutron,neoareslinux/neutron,dims/neutron,jerryz1982/neutron,mattt416/neutron,chitr/neutron,yanheven/neutron,aristanetworks/neutron,klmitch/neutron,rdo-management/neutron,suneeth51/neutron,pnavarro/neutron,silenci/neutron,NeCTAR-RC/neutron,yanheven/neutron,jacknjzhou/neutron,cloudbase/neutron-virtualbox,waltBB/neutron_read,Metaswitch/calico-neutron,vveerava/Openstack,apporc/neutron,rdo-management/neutron,jumpojoy/neutron | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
Update L3 agent drivers singletons to look at new agent
L3 agent drivers are singletons. They're created once, and hold
self.l3_agent. During testing, the agent is tossed away and
re-built, but the drivers singletons are pointing at the old
agent, and its old configuration.
Change-Id: Ie8a15318e71ea47cccad3b788751d914d51cbf18
Closes-Bug: #1404662 | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def __init__(self, host, conf=None):
super(TestL3NATAgent, self).__init__(host, conf)
self.event_observers.observers = set(
observer.__class__(self) for observer in
self.event_observers.observers)
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
| <commit_before># Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
<commit_msg>Update L3 agent drivers singletons to look at new agent
L3 agent drivers are singletons. They're created once, and hold
self.l3_agent. During testing, the agent is tossed away and
re-built, but the drivers singletons are pointing at the old
agent, and its old configuration.
Change-Id: Ie8a15318e71ea47cccad3b788751d914d51cbf18
Closes-Bug: #1404662<commit_after> | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def __init__(self, host, conf=None):
super(TestL3NATAgent, self).__init__(host, conf)
self.event_observers.observers = set(
observer.__class__(self) for observer in
self.event_observers.observers)
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
| # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
Update L3 agent drivers singletons to look at new agent
L3 agent drivers are singletons. They're created once, and hold
self.l3_agent. During testing, the agent is tossed away and
re-built, but the drivers singletons are pointing at the old
agent, and its old configuration.
Change-Id: Ie8a15318e71ea47cccad3b788751d914d51cbf18
Closes-Bug: #1404662# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def __init__(self, host, conf=None):
super(TestL3NATAgent, self).__init__(host, conf)
self.event_observers.observers = set(
observer.__class__(self) for observer in
self.event_observers.observers)
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
| <commit_before># Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
<commit_msg>Update L3 agent drivers singletons to look at new agent
L3 agent drivers are singletons. They're created once, and hold
self.l3_agent. During testing, the agent is tossed away and
re-built, but the drivers singletons are pointing at the old
agent, and its old configuration.
Change-Id: Ie8a15318e71ea47cccad3b788751d914d51cbf18
Closes-Bug: #1404662<commit_after># Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.agent.l3 import agent
class TestL3NATAgent(agent.L3NATAgentWithStateReport):
NESTED_NAMESPACE_SEPARATOR = '@'
def __init__(self, host, conf=None):
super(TestL3NATAgent, self).__init__(host, conf)
self.event_observers.observers = set(
observer.__class__(self) for observer in
self.event_observers.observers)
def get_ns_name(self, router_id):
ns_name = super(TestL3NATAgent, self).get_ns_name(router_id)
return "%s%s%s" % (ns_name, self.NESTED_NAMESPACE_SEPARATOR, self.host)
def get_router_id(self, ns_name):
# 'ns_name' should be in the format of: 'qrouter-<id>@<host>'.
return super(TestL3NATAgent, self).get_router_id(
ns_name.split(self.NESTED_NAMESPACE_SEPARATOR)[0])
|
a2826203584c6f42b8e48a9eb9285d3a90983b98 | rts/urls.py | rts/urls.py | from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse('admin:index')), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
| from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index')),
name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
| Use reverse_lazy to avoid weird url setup circularity. | Use reverse_lazy to avoid weird url setup circularity.
| Python | bsd-3-clause | praekelt/go-rts-zambia | from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse('admin:index')), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
Use reverse_lazy to avoid weird url setup circularity. | from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index')),
name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse('admin:index')), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
<commit_msg>Use reverse_lazy to avoid weird url setup circularity.<commit_after> | from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index')),
name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
| from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse('admin:index')), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
Use reverse_lazy to avoid weird url setup circularity.from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index')),
name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse('admin:index')), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
<commit_msg>Use reverse_lazy to avoid weird url setup circularity.<commit_after>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic.base import RedirectView
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url=reverse_lazy('admin:index')),
name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^', include('hierarchy.urls')),
url(r'^', include('data.urls')),
)
|
b4a2214d84884148760623eb655ac9e538b27370 | planterbox/tests/test_hooks/__init__.py | planterbox/tests/test_hooks/__init__.py | from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(world):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
| from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(test):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
| Clarify arguments in tests slightly | Clarify arguments in tests slightly
| Python | mit | npilon/planterbox | from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(world):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
Clarify arguments in tests slightly | from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(test):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
| <commit_before>from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(world):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
<commit_msg>Clarify arguments in tests slightly<commit_after> | from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(test):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
| from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(world):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
Clarify arguments in tests slightlyfrom planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(test):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
| <commit_before>from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(world):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(scenario_test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
<commit_msg>Clarify arguments in tests slightly<commit_after>from planterbox import (
step,
hook,
)
hooks_run = set()
@hook('before', 'feature')
def before_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('before', 'feature'))
@hook('before', 'scenario')
def before_scenario_hook(test):
global hooks_run
hooks_run.add(('before', 'scenario'))
@hook('before', 'step')
def before_step_hook(step_text):
global hooks_run
hooks_run.add(('before', 'step'))
@step(r'I verify that all before hooks have run')
def verify_before_hooks(test):
global hooks_run
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
}
@hook('after', 'feature')
def after_feature_hook(feature_suite):
global hooks_run
hooks_run.add(('after', 'feature'))
assert hooks_run == {('before', 'feature'),
('before', 'scenario'),
('before', 'step'),
('after', 'feature'),
('after', 'scenario'),
('after', 'step'),
}
@hook('after', 'scenario')
def after_scenario_hook(test):
global hooks_run
hooks_run.add(('after', 'scenario'))
@hook('after', 'step')
def after_step_hook(step_text):
global hooks_run
hooks_run.add(('after', 'step'))
|
229d1f1611f7372e43ae5f638b9fcb15fe395432 | notebooks/demo/services/common/tools.py | notebooks/demo/services/common/tools.py | import csv
import os
HERE = os.path.dirname(os.path.abspath(__file__))
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
yield elt
HAITI_DB = list(load_db())
| # -*- coding: utf-8 -*-
import csv
import os
import re
HERE = os.path.dirname(os.path.abspath(__file__))
def sexa_to_dec(dh, min, secs, sign):
return sign*(dh + float(min)/60 + float(secs)/60**2)
def string_to_dec(s, neg):
parsed = filter(
None, re.split('[\'" °]', unicode(s, 'utf-8')))
sign = -1 if parsed[-1] == neg else 1
return sexa_to_dec(float(parsed[0]), float(parsed[1]), float(parsed[2]),
sign)
def process_geo_coordinates(obj):
if obj['Latitude']:
obj['Latitude'] = string_to_dec(obj['Latitude'], 'S')
if obj['Longitude']:
obj['Longitude'] = string_to_dec(obj['Longitude'], 'W')
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
process_geo_coordinates(elt)
yield elt
HAITI_DB = list(load_db())
| Return geo coordinates in decimal | Return geo coordinates in decimal
| Python | mit | DesignSafe-CI/adama_example | import csv
import os
HERE = os.path.dirname(os.path.abspath(__file__))
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
yield elt
HAITI_DB = list(load_db())
Return geo coordinates in decimal | # -*- coding: utf-8 -*-
import csv
import os
import re
HERE = os.path.dirname(os.path.abspath(__file__))
def sexa_to_dec(dh, min, secs, sign):
return sign*(dh + float(min)/60 + float(secs)/60**2)
def string_to_dec(s, neg):
parsed = filter(
None, re.split('[\'" °]', unicode(s, 'utf-8')))
sign = -1 if parsed[-1] == neg else 1
return sexa_to_dec(float(parsed[0]), float(parsed[1]), float(parsed[2]),
sign)
def process_geo_coordinates(obj):
if obj['Latitude']:
obj['Latitude'] = string_to_dec(obj['Latitude'], 'S')
if obj['Longitude']:
obj['Longitude'] = string_to_dec(obj['Longitude'], 'W')
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
process_geo_coordinates(elt)
yield elt
HAITI_DB = list(load_db())
| <commit_before>import csv
import os
HERE = os.path.dirname(os.path.abspath(__file__))
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
yield elt
HAITI_DB = list(load_db())
<commit_msg>Return geo coordinates in decimal<commit_after> | # -*- coding: utf-8 -*-
import csv
import os
import re
HERE = os.path.dirname(os.path.abspath(__file__))
def sexa_to_dec(dh, min, secs, sign):
return sign*(dh + float(min)/60 + float(secs)/60**2)
def string_to_dec(s, neg):
parsed = filter(
None, re.split('[\'" °]', unicode(s, 'utf-8')))
sign = -1 if parsed[-1] == neg else 1
return sexa_to_dec(float(parsed[0]), float(parsed[1]), float(parsed[2]),
sign)
def process_geo_coordinates(obj):
if obj['Latitude']:
obj['Latitude'] = string_to_dec(obj['Latitude'], 'S')
if obj['Longitude']:
obj['Longitude'] = string_to_dec(obj['Longitude'], 'W')
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
process_geo_coordinates(elt)
yield elt
HAITI_DB = list(load_db())
| import csv
import os
HERE = os.path.dirname(os.path.abspath(__file__))
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
yield elt
HAITI_DB = list(load_db())
Return geo coordinates in decimal# -*- coding: utf-8 -*-
import csv
import os
import re
HERE = os.path.dirname(os.path.abspath(__file__))
def sexa_to_dec(dh, min, secs, sign):
return sign*(dh + float(min)/60 + float(secs)/60**2)
def string_to_dec(s, neg):
parsed = filter(
None, re.split('[\'" °]', unicode(s, 'utf-8')))
sign = -1 if parsed[-1] == neg else 1
return sexa_to_dec(float(parsed[0]), float(parsed[1]), float(parsed[2]),
sign)
def process_geo_coordinates(obj):
if obj['Latitude']:
obj['Latitude'] = string_to_dec(obj['Latitude'], 'S')
if obj['Longitude']:
obj['Longitude'] = string_to_dec(obj['Longitude'], 'W')
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
process_geo_coordinates(elt)
yield elt
HAITI_DB = list(load_db())
| <commit_before>import csv
import os
HERE = os.path.dirname(os.path.abspath(__file__))
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
yield elt
HAITI_DB = list(load_db())
<commit_msg>Return geo coordinates in decimal<commit_after># -*- coding: utf-8 -*-
import csv
import os
import re
HERE = os.path.dirname(os.path.abspath(__file__))
def sexa_to_dec(dh, min, secs, sign):
return sign*(dh + float(min)/60 + float(secs)/60**2)
def string_to_dec(s, neg):
parsed = filter(
None, re.split('[\'" °]', unicode(s, 'utf-8')))
sign = -1 if parsed[-1] == neg else 1
return sexa_to_dec(float(parsed[0]), float(parsed[1]), float(parsed[2]),
sign)
def process_geo_coordinates(obj):
if obj['Latitude']:
obj['Latitude'] = string_to_dec(obj['Latitude'], 'S')
if obj['Longitude']:
obj['Longitude'] = string_to_dec(obj['Longitude'], 'W')
def load_db():
with open(os.path.join(HERE, 'The_Haiti_Earthquake_Database.csv')) as f:
reader = csv.DictReader(f)
for elt in reader:
del elt['']
process_geo_coordinates(elt)
yield elt
HAITI_DB = list(load_db())
|
70aa7af1a5da51813a09da4f9671e293c4a01d91 | util/connection.py | util/connection.py | import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
DB_URL = os.environ.get('DB_URL')
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
| import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
AIRFLOW_CONN_MYSQL_TRACKER = os.environ.get('AIRFLOW_CONN_MYSQL_TRACKER')
if not AIRFLOW_CONN_MYSQL_TRACKER:
raise ValueError("AIRFLOW_CONN_MYSQL_TRACKER not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
| Add Mysql Tracker database to store our data | Add Mysql Tracker database to store our data
| Python | apache-2.0 | LREN-CHUV/data-factory-airflow-dags,LREN-CHUV/airflow-mri-preprocessing-dags,LREN-CHUV/data-factory-airflow-dags,LREN-CHUV/airflow-mri-preprocessing-dags | import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
DB_URL = os.environ.get('DB_URL')
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
Add Mysql Tracker database to store our data | import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
AIRFLOW_CONN_MYSQL_TRACKER = os.environ.get('AIRFLOW_CONN_MYSQL_TRACKER')
if not AIRFLOW_CONN_MYSQL_TRACKER:
raise ValueError("AIRFLOW_CONN_MYSQL_TRACKER not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
| <commit_before>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
DB_URL = os.environ.get('DB_URL')
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
<commit_msg>Add Mysql Tracker database to store our data<commit_after> | import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
AIRFLOW_CONN_MYSQL_TRACKER = os.environ.get('AIRFLOW_CONN_MYSQL_TRACKER')
if not AIRFLOW_CONN_MYSQL_TRACKER:
raise ValueError("AIRFLOW_CONN_MYSQL_TRACKER not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
| import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
DB_URL = os.environ.get('DB_URL')
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
Add Mysql Tracker database to store our dataimport os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
AIRFLOW_CONN_MYSQL_TRACKER = os.environ.get('AIRFLOW_CONN_MYSQL_TRACKER')
if not AIRFLOW_CONN_MYSQL_TRACKER:
raise ValueError("AIRFLOW_CONN_MYSQL_TRACKER not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
| <commit_before>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
DB_URL = os.environ.get('DB_URL')
if not DB_URL:
raise ValueError("DB_URL not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
<commit_msg>Add Mysql Tracker database to store our data<commit_after>import os
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.orm.scoping import scoped_session
from sqlalchemy.pool import NullPool
AIRFLOW_CONN_MYSQL_TRACKER = os.environ.get('AIRFLOW_CONN_MYSQL_TRACKER')
if not AIRFLOW_CONN_MYSQL_TRACKER:
raise ValueError("AIRFLOW_CONN_MYSQL_TRACKER not present in the environment")
Base = automap_base()
engine = create_engine(DB_URL, poolclass=NullPool)
Base.prepare(engine, reflect=True)
session_factory = sessionmaker(bind=engine, expire_on_commit=False)
Session = scoped_session(session_factory)
|
f8464d93ab56f7b8d46e430de4fe9b019117da4c | ofp/v0x01/controller2switch/flow_mod.py | ofp/v0x01/controller2switch/flow_mod.py | """Modifications to the flow table from the controller"""
# System imports
import enum
# Third-party imports
# Local source tree imports
from common import action
from common import flow_match
from common import header as of_header
from foundation import base
from foundation import basic_types
# Enums
class FlowModCommand(enum.Enum):
"""
List the possible commands for a flow.
Enums:
OFPFC_ADD # New Flow
OFPFC_MODIFY # Modify all flows
OFPFC_MODIFY_STRICT # Modify entry strictly matching wildcards
OFPFC_DELETE # Delete all matching flows
OFPFC_DELETE_STRICT # Strictly match wildcards and priority
"""
OFPFC_ADD = 1
OFPFC_MODIFY = 2
OFPFC_MODIFY_STRICT = 3
OFPFC_DELETE = 4
OFPFC_DELETE_STRICT = 5
# Classes
class FlowMod(base.GenericStruct):
"""
Modifies the flow table from the controller.
:param header -- OpenFlow header
:param match -- Fields to match
:param cookie -- Opaque controller-issued identifier
:param command -- One of OFPFC_*
:param idle_timeout -- Idle time before discarding (seconds)
:param hard_timeout -- Max time before discarding (seconds)
:param priority -- Priority level of flow entry
:param buffer_idle -- Buffered packet to apply to (or -1).
Not meaningful for OFPFC_DELETE*
:param out_port -- For OFPFC_DELETE* commands, require matching
entries to include this as an output port.
A value of OFPP_NONE indicates no restriction.
:param flags -- One of OFPFF_*
:param actions -- The action length is inferred from the length field
in the header
"""
header = of_header.OFPHeader()
match = flow_match.OFPMatch()
cookie = basic_types.UBInt64()
command = basic_types.UBInt16()
idle_timeout = basic_types.UBInt16()
hard_timeout = basic_types.UBInt16()
priority = basic_types.UBInt16()
buffer_id = basic_types.UBInt32()
out_port = basic_types.UBInt16()
flags = basic_types.UBInt16()
actions = action.ActionHeader()
def __init__(self, command=None, idle_timeout=None, hard_timeout=None,
priority=None, buffer_id=None, out_port=None, flags=None,
actions=None):
self.command=command
self.idle_timeout=idle_timeout
self.hard_timeout=hard_timeout
self.priority=priority
self.buffer_id=buffer_id
self.out_port=out_port
self.flags=flags
self.actions=actions
| Implement flow table modifications classes and enums | Implement flow table modifications classes and enums
| Python | mit | cemsbr/python-openflow,kytos/python-openflow | Implement flow table modifications classes and enums | """Modifications to the flow table from the controller"""
# System imports
import enum
# Third-party imports
# Local source tree imports
from common import action
from common import flow_match
from common import header as of_header
from foundation import base
from foundation import basic_types
# Enums
class FlowModCommand(enum.Enum):
"""
List the possible commands for a flow.
Enums:
OFPFC_ADD # New Flow
OFPFC_MODIFY # Modify all flows
OFPFC_MODIFY_STRICT # Modify entry strictly matching wildcards
OFPFC_DELETE # Delete all matching flows
OFPFC_DELETE_STRICT # Strictly match wildcards and priority
"""
OFPFC_ADD = 1
OFPFC_MODIFY = 2
OFPFC_MODIFY_STRICT = 3
OFPFC_DELETE = 4
OFPFC_DELETE_STRICT = 5
# Classes
class FlowMod(base.GenericStruct):
    """
    Modifies the flow table from the controller.

    :param header       -- OpenFlow header
    :param match        -- Fields to match
    :param cookie       -- Opaque controller-issued identifier
    :param command      -- One of OFPFC_*
    :param idle_timeout -- Idle time before discarding (seconds)
    :param hard_timeout -- Max time before discarding (seconds)
    :param priority     -- Priority level of flow entry
    :param buffer_id    -- Buffered packet to apply to (or -1).
                           Not meaningful for OFPFC_DELETE*
    :param out_port     -- For OFPFC_DELETE* commands, require matching
                           entries to include this as an output port.
                           A value of OFPP_NONE indicates no restriction.
    :param flags        -- One of OFPFF_*
    :param actions      -- The action length is inferred from the length
                           field in the header
    """

    # Class-level field templates describing the wire layout.
    header = of_header.OFPHeader()
    match = flow_match.OFPMatch()
    cookie = basic_types.UBInt64()
    command = basic_types.UBInt16()
    idle_timeout = basic_types.UBInt16()
    hard_timeout = basic_types.UBInt16()
    priority = basic_types.UBInt16()
    buffer_id = basic_types.UBInt32()
    out_port = basic_types.UBInt16()
    flags = basic_types.UBInt16()
    actions = action.ActionHeader()

    def __init__(self, command=None, idle_timeout=None, hard_timeout=None,
                 priority=None, buffer_id=None, out_port=None, flags=None,
                 actions=None):
        # Instance attributes shadow the class-level field templates with
        # the per-message values (PEP 8 spacing applied).
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.flags = flags
        self.actions = actions
| <commit_before><commit_msg>Implement flow table modifications classes and enums<commit_after> | """Modifications to the flow table from the controller"""
# System imports
import enum
# Third-party imports
# Local source tree imports
from common import action
from common import flow_match
from common import header as of_header
from foundation import base
from foundation import basic_types
# Enums
class FlowModCommand(enum.Enum):
    """
    List the possible commands for a flow.

    Enums:
        OFPFC_ADD            # New Flow
        OFPFC_MODIFY         # Modify all flows
        OFPFC_MODIFY_STRICT  # Modify entry strictly matching wildcards
        OFPFC_DELETE         # Delete all matching flows
        OFPFC_DELETE_STRICT  # Strictly match wildcards and priority
    """

    # Values 1-5 -- presumably mirror the OpenFlow ofp_flow_mod_command
    # wire enum; TODO confirm against the protocol spec.
    OFPFC_ADD = 1
    OFPFC_MODIFY = 2
    OFPFC_MODIFY_STRICT = 3
    OFPFC_DELETE = 4
    OFPFC_DELETE_STRICT = 5
# Classes
class FlowMod(base.GenericStruct):
    """
    Modifies the flow table from the controller.

    :param header       -- OpenFlow header
    :param match        -- Fields to match
    :param cookie       -- Opaque controller-issued identifier
    :param command      -- One of OFPFC_*
    :param idle_timeout -- Idle time before discarding (seconds)
    :param hard_timeout -- Max time before discarding (seconds)
    :param priority     -- Priority level of flow entry
    :param buffer_id    -- Buffered packet to apply to (or -1).
                           Not meaningful for OFPFC_DELETE*
    :param out_port     -- For OFPFC_DELETE* commands, require matching
                           entries to include this as an output port.
                           A value of OFPP_NONE indicates no restriction.
    :param flags        -- One of OFPFF_*
    :param actions      -- The action length is inferred from the length
                           field in the header
    """

    # Class-level field templates describing the wire layout.
    header = of_header.OFPHeader()
    match = flow_match.OFPMatch()
    cookie = basic_types.UBInt64()
    command = basic_types.UBInt16()
    idle_timeout = basic_types.UBInt16()
    hard_timeout = basic_types.UBInt16()
    priority = basic_types.UBInt16()
    buffer_id = basic_types.UBInt32()
    out_port = basic_types.UBInt16()
    flags = basic_types.UBInt16()
    actions = action.ActionHeader()

    def __init__(self, command=None, idle_timeout=None, hard_timeout=None,
                 priority=None, buffer_id=None, out_port=None, flags=None,
                 actions=None):
        # Instance attributes shadow the class-level field templates with
        # the per-message values (PEP 8 spacing applied).
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.flags = flags
        self.actions = actions
| Implement flow table modifications classes and enums"""Modifications to the flow table from the controller"""
# System imports
import enum
# Third-party imports
# Local source tree imports
from common import action
from common import flow_match
from common import header as of_header
from foundation import base
from foundation import basic_types
# Enums
class FlowModCommand(enum.Enum):
    """
    List the possible commands for a flow.

    Enums:
        OFPFC_ADD            # New Flow
        OFPFC_MODIFY         # Modify all flows
        OFPFC_MODIFY_STRICT  # Modify entry strictly matching wildcards
        OFPFC_DELETE         # Delete all matching flows
        OFPFC_DELETE_STRICT  # Strictly match wildcards and priority
    """

    # Values 1-5 -- presumably mirror the OpenFlow ofp_flow_mod_command
    # wire enum; TODO confirm against the protocol spec.
    OFPFC_ADD = 1
    OFPFC_MODIFY = 2
    OFPFC_MODIFY_STRICT = 3
    OFPFC_DELETE = 4
    OFPFC_DELETE_STRICT = 5
# Classes
class FlowMod(base.GenericStruct):
    """
    Modifies the flow table from the controller.

    :param header       -- OpenFlow header
    :param match        -- Fields to match
    :param cookie       -- Opaque controller-issued identifier
    :param command      -- One of OFPFC_*
    :param idle_timeout -- Idle time before discarding (seconds)
    :param hard_timeout -- Max time before discarding (seconds)
    :param priority     -- Priority level of flow entry
    :param buffer_id    -- Buffered packet to apply to (or -1).
                           Not meaningful for OFPFC_DELETE*
    :param out_port     -- For OFPFC_DELETE* commands, require matching
                           entries to include this as an output port.
                           A value of OFPP_NONE indicates no restriction.
    :param flags        -- One of OFPFF_*
    :param actions      -- The action length is inferred from the length
                           field in the header
    """

    # Class-level field templates describing the wire layout.
    header = of_header.OFPHeader()
    match = flow_match.OFPMatch()
    cookie = basic_types.UBInt64()
    command = basic_types.UBInt16()
    idle_timeout = basic_types.UBInt16()
    hard_timeout = basic_types.UBInt16()
    priority = basic_types.UBInt16()
    buffer_id = basic_types.UBInt32()
    out_port = basic_types.UBInt16()
    flags = basic_types.UBInt16()
    actions = action.ActionHeader()

    def __init__(self, command=None, idle_timeout=None, hard_timeout=None,
                 priority=None, buffer_id=None, out_port=None, flags=None,
                 actions=None):
        # Instance attributes shadow the class-level field templates with
        # the per-message values (PEP 8 spacing applied).
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.flags = flags
        self.actions = actions
| <commit_before><commit_msg>Implement flow table modifications classes and enums<commit_after>"""Modifications to the flow table from the controller"""
# System imports
import enum
# Third-party imports
# Local source tree imports
from common import action
from common import flow_match
from common import header as of_header
from foundation import base
from foundation import basic_types
# Enums
class FlowModCommand(enum.Enum):
    """
    List the possible commands for a flow.

    Enums:
        OFPFC_ADD            # New Flow
        OFPFC_MODIFY         # Modify all flows
        OFPFC_MODIFY_STRICT  # Modify entry strictly matching wildcards
        OFPFC_DELETE         # Delete all matching flows
        OFPFC_DELETE_STRICT  # Strictly match wildcards and priority
    """

    # Values 1-5 -- presumably mirror the OpenFlow ofp_flow_mod_command
    # wire enum; TODO confirm against the protocol spec.
    OFPFC_ADD = 1
    OFPFC_MODIFY = 2
    OFPFC_MODIFY_STRICT = 3
    OFPFC_DELETE = 4
    OFPFC_DELETE_STRICT = 5
# Classes
class FlowMod(base.GenericStruct):
    """
    Modifies the flow table from the controller.

    :param header       -- OpenFlow header
    :param match        -- Fields to match
    :param cookie       -- Opaque controller-issued identifier
    :param command      -- One of OFPFC_*
    :param idle_timeout -- Idle time before discarding (seconds)
    :param hard_timeout -- Max time before discarding (seconds)
    :param priority     -- Priority level of flow entry
    :param buffer_id    -- Buffered packet to apply to (or -1).
                           Not meaningful for OFPFC_DELETE*
    :param out_port     -- For OFPFC_DELETE* commands, require matching
                           entries to include this as an output port.
                           A value of OFPP_NONE indicates no restriction.
    :param flags        -- One of OFPFF_*
    :param actions      -- The action length is inferred from the length
                           field in the header
    """

    # Class-level field templates describing the wire layout.
    header = of_header.OFPHeader()
    match = flow_match.OFPMatch()
    cookie = basic_types.UBInt64()
    command = basic_types.UBInt16()
    idle_timeout = basic_types.UBInt16()
    hard_timeout = basic_types.UBInt16()
    priority = basic_types.UBInt16()
    buffer_id = basic_types.UBInt32()
    out_port = basic_types.UBInt16()
    flags = basic_types.UBInt16()
    actions = action.ActionHeader()

    def __init__(self, command=None, idle_timeout=None, hard_timeout=None,
                 priority=None, buffer_id=None, out_port=None, flags=None,
                 actions=None):
        # Instance attributes shadow the class-level field templates with
        # the per-message values (PEP 8 spacing applied).
        self.command = command
        self.idle_timeout = idle_timeout
        self.hard_timeout = hard_timeout
        self.priority = priority
        self.buffer_id = buffer_id
        self.out_port = out_port
        self.flags = flags
        self.actions = actions
| |
6934b792deaad42eb8ab856d1e0420b9a88a8c41 | utility/util.py | utility/util.py | # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """Return the ids of ``model_name`` records the client must re-sync.

    :param model_name: 'map' or 'kml' -- selects the table to scan.
    :param req_data: dict keyed by str(id); each value holds the client's
        'last_modified' stamp as a UTC string '%Y-%m-%dT%H:%M:%SZ'.
    :returns: list of ids missing from ``req_data`` or whose database
        row is newer than the client's copy.
    """
    db_data = None
    if model_name == 'map':
        db_data = Map.objects.all()
    elif model_name == 'kml':
        db_data = KML.objects.all()
    id_list = []
    for data in db_data:
        id_str = str(data.id)
        if id_str not in req_data:
            # Record unknown to the client -- always send it.
            id_list.append(data.id)
        else:
            # BUGFIX: timestamps arrive as e.g. '2014-01-01T12:00:00Z'
            # ('%Y-%m-%dT%H:%M:%SZ'), not '%Y-%m-%d %H:%M:%S'.
            req_last_modified = datetime.strptime(
                req_data[id_str]['last_modified'],
                '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
            if data.last_modified > req_last_modified:
                id_list.append(data.id)
    return id_list
| # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """List the ids of records the client should refresh.

    An id is reported when the record does not appear in ``req_data`` at
    all, or when the stored row is newer than the client-supplied
    last-modified stamp (parsed as '%Y-%m-%dT%H:%M:%SZ', UTC).
    """
    if model_name == 'map':
        records = Map.objects.all()
    elif model_name == 'kml':
        records = KML.objects.all()
    else:
        records = None
    stale_ids = []
    for record in records:
        key = str(record.id)
        if key not in req_data.keys():
            stale_ids.append(record.id)
            continue
        client_stamp = datetime.strptime(
            req_data[key]['last_modified'],
            '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
        if record.last_modified > client_stamp:
            stale_ids.append(record.id)
    return stale_ids
| Modify parsing logic for last_modified in JSON | Modify parsing logic for last_modified in JSON
| Python | mit | CMUPracticum/TrailScribe,CMUPracticum/TrailScribeServer,CMUPracticum/TrailScribe,CMUPracticum/TrailScribeServer,CMUPracticum/TrailScribe | # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """Return the ids of ``model_name`` records the client must re-sync.

    :param model_name: 'map' or 'kml' -- selects the table to scan.
    :param req_data: dict keyed by str(id); each value holds the client's
        'last_modified' stamp as a UTC string '%Y-%m-%dT%H:%M:%SZ'.
    :returns: list of ids missing from ``req_data`` or whose database
        row is newer than the client's copy.
    """
    db_data = None
    if model_name == 'map':
        db_data = Map.objects.all()
    elif model_name == 'kml':
        db_data = KML.objects.all()
    id_list = []
    for data in db_data:
        id_str = str(data.id)
        if id_str not in req_data:
            # Record unknown to the client -- always send it.
            id_list.append(data.id)
        else:
            # BUGFIX: timestamps arrive as e.g. '2014-01-01T12:00:00Z'
            # ('%Y-%m-%dT%H:%M:%SZ'), not '%Y-%m-%d %H:%M:%S'.
            req_last_modified = datetime.strptime(
                req_data[id_str]['last_modified'],
                '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
            if data.last_modified > req_last_modified:
                id_list.append(data.id)
    return id_list
Modify parsing logic for last_modified in JSON | # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """List the ids of records the client should refresh.

    An id is reported when the record does not appear in ``req_data`` at
    all, or when the stored row is newer than the client-supplied
    last-modified stamp (parsed as '%Y-%m-%dT%H:%M:%SZ', UTC).
    """
    if model_name == 'map':
        records = Map.objects.all()
    elif model_name == 'kml':
        records = KML.objects.all()
    else:
        records = None
    stale_ids = []
    for record in records:
        key = str(record.id)
        if key not in req_data.keys():
            stale_ids.append(record.id)
            continue
        client_stamp = datetime.strptime(
            req_data[key]['last_modified'],
            '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
        if record.last_modified > client_stamp:
            stale_ids.append(record.id)
    return stale_ids
| <commit_before># Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """Return the ids of ``model_name`` records the client must re-sync.

    :param model_name: 'map' or 'kml' -- selects the table to scan.
    :param req_data: dict keyed by str(id); each value holds the client's
        'last_modified' stamp as a UTC string '%Y-%m-%dT%H:%M:%SZ'.
    :returns: list of ids missing from ``req_data`` or whose database
        row is newer than the client's copy.
    """
    db_data = None
    if model_name == 'map':
        db_data = Map.objects.all()
    elif model_name == 'kml':
        db_data = KML.objects.all()
    id_list = []
    for data in db_data:
        id_str = str(data.id)
        if id_str not in req_data:
            # Record unknown to the client -- always send it.
            id_list.append(data.id)
        else:
            # BUGFIX: timestamps arrive as e.g. '2014-01-01T12:00:00Z'
            # ('%Y-%m-%dT%H:%M:%SZ'), not '%Y-%m-%d %H:%M:%S'.
            req_last_modified = datetime.strptime(
                req_data[id_str]['last_modified'],
                '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
            if data.last_modified > req_last_modified:
                id_list.append(data.id)
    return id_list
<commit_msg>Modify parsing logic for last_modified in JSON<commit_after> | # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """List the ids of records the client should refresh.

    An id is reported when the record does not appear in ``req_data`` at
    all, or when the stored row is newer than the client-supplied
    last-modified stamp (parsed as '%Y-%m-%dT%H:%M:%SZ', UTC).
    """
    if model_name == 'map':
        records = Map.objects.all()
    elif model_name == 'kml':
        records = KML.objects.all()
    else:
        records = None
    stale_ids = []
    for record in records:
        key = str(record.id)
        if key not in req_data.keys():
            stale_ids.append(record.id)
            continue
        client_stamp = datetime.strptime(
            req_data[key]['last_modified'],
            '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
        if record.last_modified > client_stamp:
            stale_ids.append(record.id)
    return stale_ids
| # Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """Return the ids of ``model_name`` records the client must re-sync.

    :param model_name: 'map' or 'kml' -- selects the table to scan.
    :param req_data: dict keyed by str(id); each value holds the client's
        'last_modified' stamp as a UTC string '%Y-%m-%dT%H:%M:%SZ'.
    :returns: list of ids missing from ``req_data`` or whose database
        row is newer than the client's copy.
    """
    db_data = None
    if model_name == 'map':
        db_data = Map.objects.all()
    elif model_name == 'kml':
        db_data = KML.objects.all()
    id_list = []
    for data in db_data:
        id_str = str(data.id)
        if id_str not in req_data:
            # Record unknown to the client -- always send it.
            id_list.append(data.id)
        else:
            # BUGFIX: timestamps arrive as e.g. '2014-01-01T12:00:00Z'
            # ('%Y-%m-%dT%H:%M:%SZ'), not '%Y-%m-%d %H:%M:%S'.
            req_last_modified = datetime.strptime(
                req_data[id_str]['last_modified'],
                '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
            if data.last_modified > req_last_modified:
                id_list.append(data.id)
    return id_list
Modify parsing logic for last_modified in JSON# Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """List the ids of records the client should refresh.

    An id is reported when the record does not appear in ``req_data`` at
    all, or when the stored row is newer than the client-supplied
    last-modified stamp (parsed as '%Y-%m-%dT%H:%M:%SZ', UTC).
    """
    if model_name == 'map':
        records = Map.objects.all()
    elif model_name == 'kml':
        records = KML.objects.all()
    else:
        records = None
    stale_ids = []
    for record in records:
        key = str(record.id)
        if key not in req_data.keys():
            stale_ids.append(record.id)
            continue
        client_stamp = datetime.strptime(
            req_data[key]['last_modified'],
            '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
        if record.last_modified > client_stamp:
            stale_ids.append(record.id)
    return stale_ids
| <commit_before># Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """Return the ids of ``model_name`` records the client must re-sync.

    :param model_name: 'map' or 'kml' -- selects the table to scan.
    :param req_data: dict keyed by str(id); each value holds the client's
        'last_modified' stamp as a UTC string '%Y-%m-%dT%H:%M:%SZ'.
    :returns: list of ids missing from ``req_data`` or whose database
        row is newer than the client's copy.
    """
    db_data = None
    if model_name == 'map':
        db_data = Map.objects.all()
    elif model_name == 'kml':
        db_data = KML.objects.all()
    id_list = []
    for data in db_data:
        id_str = str(data.id)
        if id_str not in req_data:
            # Record unknown to the client -- always send it.
            id_list.append(data.id)
        else:
            # BUGFIX: timestamps arrive as e.g. '2014-01-01T12:00:00Z'
            # ('%Y-%m-%dT%H:%M:%SZ'), not '%Y-%m-%d %H:%M:%S'.
            req_last_modified = datetime.strptime(
                req_data[id_str]['last_modified'],
                '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
            if data.last_modified > req_last_modified:
                id_list.append(data.id)
    return id_list
<commit_msg>Modify parsing logic for last_modified in JSON<commit_after># Stdlib imports
from datetime import datetime
from pytz import timezone
# Core Django imports
from django.utils.timezone import utc
# Imports from app
from sync_center.models import Map, KML
def get_update_id_list(model_name, req_data):
    """List the ids of records the client should refresh.

    An id is reported when the record does not appear in ``req_data`` at
    all, or when the stored row is newer than the client-supplied
    last-modified stamp (parsed as '%Y-%m-%dT%H:%M:%SZ', UTC).
    """
    if model_name == 'map':
        records = Map.objects.all()
    elif model_name == 'kml':
        records = KML.objects.all()
    else:
        records = None
    stale_ids = []
    for record in records:
        key = str(record.id)
        if key not in req_data.keys():
            stale_ids.append(record.id)
            continue
        client_stamp = datetime.strptime(
            req_data[key]['last_modified'],
            '%Y-%m-%dT%H:%M:%SZ').replace(tzinfo=utc)
        if record.last_modified > client_stamp:
            stale_ids.append(record.id)
    return stale_ids
|
a66c6d3b9c3453f4ea5a4352de17bb83c75776a5 | settings.py | settings.py | #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits (units not stated here -- presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
# BUGFIX: calibrated y travel is 101.9 (same as x), not the nominal 100.
stagy_max = 101.9
| #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits per Orbis stage calibration (units not stated here --
# presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
stagy_max = 101.9
| Update x and y stage limits according to Orbis stage calibration. | Update x and y stage limits according to Orbis stage calibration.
| Python | mit | dngv/JCAPOrbisAlign | #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits (units not stated here -- presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
# BUGFIX: calibrated y travel is 101.9 (same as x), not the nominal 100.
stagy_max = 101.9
Update x and y stage limits according to Orbis stage calibration. | #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits per Orbis stage calibration (units not stated here --
# presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
stagy_max = 101.9
| <commit_before>#
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits (units not stated here -- presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
# BUGFIX: calibrated y travel is 101.9 (same as x), not the nominal 100.
stagy_max = 101.9
<commit_msg>Update x and y stage limits according to Orbis stage calibration.<commit_after> | #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits per Orbis stage calibration (units not stated here --
# presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
stagy_max = 101.9
| #
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits (units not stated here -- presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
# BUGFIX: calibrated y travel is 101.9 (same as x), not the nominal 100.
stagy_max = 101.9
Update x and y stage limits according to Orbis stage calibration.#
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits per Orbis stage calibration (units not stated here --
# presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
stagy_max = 101.9
| <commit_before>#
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits (units not stated here -- presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
# BUGFIX: calibrated y travel is 101.9 (same as x), not the nominal 100.
stagy_max = 101.9
<commit_msg>Update x and y stage limits according to Orbis stage calibration.<commit_after>#
# Instrument paths and stage soft limits.
platedir = 'J:\\hte_jcap_app_proto\\plate'  # plate definition files
mapdir = 'J:\\hte_jcap_app_proto\\map'      # map files
rundir = 'C:\\INST\\RUNS'                   # run output directory
# Stage travel limits per Orbis stage calibration (units not stated here --
# presumably mm, TODO confirm).
stagx_min = 0
stagx_max = 101.9
stagy_min = 0
stagy_max = 101.9
|
07ef73f98e85919863af43f9c50bde85a143660d | conf_site/reviews/admin.py | conf_site/reviews/admin.py | from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    """Inline editor for the proposals attached to a notification."""
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # The proposals M2M is edited through the inline, not the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Allow narrowing votes by reviewer as well as by score -- matches the
    # corrected duplicates of this admin module.
    list_filter = ["score", "voter"]
| from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    # Through-model inline so notifications can edit their attached proposals.
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # M2M handled via the inline; hide the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Sidebar filters: by score and by reviewer (voter).
    list_filter = ["score", "voter"]
| Enable filtering ProposalVotes by reviewer. | Enable filtering ProposalVotes by reviewer.
| Python | mit | pydata/conf_site,pydata/conf_site,pydata/conf_site | from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    """Inline editor for the proposals attached to a notification."""
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # The proposals M2M is edited through the inline, not the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Allow narrowing votes by reviewer as well as by score -- matches the
    # corrected duplicates of this admin module.
    list_filter = ["score", "voter"]
Enable filtering ProposalVotes by reviewer. | from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    # Through-model inline so notifications can edit their attached proposals.
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # M2M handled via the inline; hide the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Sidebar filters: by score and by reviewer (voter).
    list_filter = ["score", "voter"]
| <commit_before>from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    """Inline editor for the proposals attached to a notification."""
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # The proposals M2M is edited through the inline, not the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Allow narrowing votes by reviewer as well as by score -- matches the
    # corrected duplicates of this admin module.
    list_filter = ["score", "voter"]
<commit_msg>Enable filtering ProposalVotes by reviewer.<commit_after> | from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    # Through-model inline so notifications can edit their attached proposals.
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # M2M handled via the inline; hide the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Sidebar filters: by score and by reviewer (voter).
    list_filter = ["score", "voter"]
| from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    """Inline editor for the proposals attached to a notification."""
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # The proposals M2M is edited through the inline, not the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Allow narrowing votes by reviewer as well as by score -- matches the
    # corrected duplicates of this admin module.
    list_filter = ["score", "voter"]
Enable filtering ProposalVotes by reviewer.from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    # Through-model inline so notifications can edit their attached proposals.
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # M2M handled via the inline; hide the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Sidebar filters: by score and by reviewer (voter).
    list_filter = ["score", "voter"]
| <commit_before>from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    """Inline editor for the proposals attached to a notification."""
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # The proposals M2M is edited through the inline, not the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Allow narrowing votes by reviewer as well as by score -- matches the
    # corrected duplicates of this admin module.
    list_filter = ["score", "voter"]
<commit_msg>Enable filtering ProposalVotes by reviewer.<commit_after>from django.contrib import admin
from conf_site.reviews.models import (
ProposalFeedback,
ProposalNotification,
ProposalResult,
ProposalVote,
)
class ProposalInline(admin.StackedInline):
    # Through-model inline so notifications can edit their attached proposals.
    model = ProposalNotification.proposals.through


@admin.register(ProposalFeedback)
class ProposalFeedbackAdmin(admin.ModelAdmin):
    list_display = ("proposal", "author", "comment", "date_created")


@admin.register(ProposalNotification)
class ProposalNotificationAdmin(admin.ModelAdmin):
    # M2M handled via the inline; hide the default widget.
    exclude = ("proposals",)
    inlines = [ProposalInline]
    list_display = ("subject", "body", "date_sent")


@admin.register(ProposalResult)
class ProposalResultAdmin(admin.ModelAdmin):
    list_display = ("proposal", "status")


@admin.register(ProposalVote)
class ProposalVoteAdmin(admin.ModelAdmin):
    list_display = ("proposal", "voter", "score", "comment")
    # Sidebar filters: by score and by reviewer (voter).
    list_filter = ["score", "voter"]
|
ff65853def5bf1044fe457362f85b8aecca66152 | tests/laser/transaction/create.py | tests/laser/transaction/create.py | import mythril.laser.ethereum.transaction as transaction
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
    """Creating calls.sol must deploy code matching its runtime disassembly."""
    contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
    laser_evm = svm.LaserEVM({})
    laser_evm.time = datetime.now()
    # BUGFIX: contract creation was moved off LaserEVM into the transaction
    # module (see the refactored copies of this test); call it there.
    transaction.execute_contract_creation(laser_evm, contract.creation_code)
    resulting_final_state = laser_evm.open_states[0]
    for address, created_account in resulting_final_state.accounts.items():
        created_account_code = created_account.code
        actual_code = Disassembly(contract.code)
        for i in range(len(created_account_code.instruction_list)):
            found_instruction = created_account_code.instruction_list[i]
            actual_instruction = actual_code.instruction_list[i]
            # Only the opcode stream is compared.
            assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
    """Symbolic execution of calls.sol must report at least one issue."""
    source_file = str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol')
    wrapper = SymExecWrapper(SolidityContract(source_file),
                             address=util.get_indexed_address(0),
                             strategy="dfs")
    detected = fire_lasers(wrapper)
    assert len(detected) != 0
| from mythril.laser.ethereum.transaction import execute_contract_creation
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
    """Deployed account code must equal the contract's runtime disassembly."""
    contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
    laser_evm = svm.LaserEVM({})
    laser_evm.time = datetime.now()
    execute_contract_creation(laser_evm, contract.creation_code)
    final_state = laser_evm.open_states[0]
    for _address, account in final_state.accounts.items():
        deployed = account.code.instruction_list
        expected = Disassembly(contract.code).instruction_list
        for i, found in enumerate(deployed):
            # Opcode-by-opcode comparison of deployed vs. compiled code.
            assert found['opcode'] == expected[i]['opcode']
def test_sym_exec():
    """Symbolic execution of calls.sol must report at least one issue."""
    source_file = str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol')
    wrapper = SymExecWrapper(SolidityContract(source_file),
                             address=util.get_indexed_address(0),
                             strategy="dfs")
    detected = fire_lasers(wrapper)
    assert len(detected) != 0
| Update test to reflect the refactor | Update test to reflect the refactor
| Python | mit | b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril | import mythril.laser.ethereum.transaction as transaction
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
    """Creating calls.sol must deploy code matching its runtime disassembly."""
    contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
    laser_evm = svm.LaserEVM({})
    laser_evm.time = datetime.now()
    # BUGFIX: contract creation was moved off LaserEVM into the transaction
    # module (see the refactored copies of this test); call it there.
    transaction.execute_contract_creation(laser_evm, contract.creation_code)
    resulting_final_state = laser_evm.open_states[0]
    for address, created_account in resulting_final_state.accounts.items():
        created_account_code = created_account.code
        actual_code = Disassembly(contract.code)
        for i in range(len(created_account_code.instruction_list)):
            found_instruction = created_account_code.instruction_list[i]
            actual_instruction = actual_code.instruction_list[i]
            # Only the opcode stream is compared.
            assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
    """Symbolic execution of calls.sol must report at least one issue."""
    source_file = str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol')
    wrapper = SymExecWrapper(SolidityContract(source_file),
                             address=util.get_indexed_address(0),
                             strategy="dfs")
    detected = fire_lasers(wrapper)
    assert len(detected) != 0
Update test to reflect the refactor | from mythril.laser.ethereum.transaction import execute_contract_creation
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
    """Deployed account code must equal the contract's runtime disassembly."""
    contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
    laser_evm = svm.LaserEVM({})
    laser_evm.time = datetime.now()
    execute_contract_creation(laser_evm, contract.creation_code)
    final_state = laser_evm.open_states[0]
    for _address, account in final_state.accounts.items():
        deployed = account.code.instruction_list
        expected = Disassembly(contract.code).instruction_list
        for i, found in enumerate(deployed):
            # Opcode-by-opcode comparison of deployed vs. compiled code.
            assert found['opcode'] == expected[i]['opcode']
def test_sym_exec():
    """Symbolic execution of calls.sol must report at least one issue."""
    source_file = str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol')
    wrapper = SymExecWrapper(SolidityContract(source_file),
                             address=util.get_indexed_address(0),
                             strategy="dfs")
    detected = fire_lasers(wrapper)
    assert len(detected) != 0
| <commit_before>import mythril.laser.ethereum.transaction as transaction
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
laser_evm.execute_contract_creation(contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
<commit_msg>Update test to reflect the refactor<commit_after> | from mythril.laser.ethereum.transaction import execute_contract_creation
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
execute_contract_creation(laser_evm, contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
| import mythril.laser.ethereum.transaction as transaction
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
laser_evm.execute_contract_creation(contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
Update test to reflect the refactorfrom mythril.laser.ethereum.transaction import execute_contract_creation
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
execute_contract_creation(laser_evm, contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
| <commit_before>import mythril.laser.ethereum.transaction as transaction
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
laser_evm.execute_contract_creation(contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
<commit_msg>Update test to reflect the refactor<commit_after>from mythril.laser.ethereum.transaction import execute_contract_creation
from mythril.ether import util
import mythril.laser.ethereum.svm as svm
from mythril.disassembler.disassembly import Disassembly
from datetime import datetime
from mythril.ether.soliditycontract import SolidityContract
import tests
from mythril.analysis.security import fire_lasers
from mythril.analysis.symbolic import SymExecWrapper
def test_create():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
laser_evm = svm.LaserEVM({})
laser_evm.time = datetime.now()
execute_contract_creation(laser_evm, contract.creation_code)
resulting_final_state = laser_evm.open_states[0]
for address, created_account in resulting_final_state.accounts.items():
created_account_code = created_account.code
actual_code = Disassembly(contract.code)
for i in range(len(created_account_code.instruction_list)):
found_instruction = created_account_code.instruction_list[i]
actual_instruction = actual_code.instruction_list[i]
assert found_instruction['opcode'] == actual_instruction['opcode']
def test_sym_exec():
contract = SolidityContract(str(tests.TESTDATA_INPUTS_CONTRACTS / 'calls.sol'))
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
issues = fire_lasers(sym)
assert len(issues) != 0
|
d41d0c15661be517d761e7d6bae2be17495b0f6e | src/deps.py | src/deps.py | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 96732
| # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 108801
| Update frontend to chrome r108801 | Update frontend to chrome r108801
| Python | apache-2.0 | natduca/trace_event_viewer,natduca/trace_event_viewer,natduca/trace_event_viewer | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 96732
Update frontend to chrome r108801 | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 108801
| <commit_before># Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 96732
<commit_msg>Update frontend to chrome r108801<commit_after> | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 108801
| # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 96732
Update frontend to chrome r108801# Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 108801
| <commit_before># Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 96732
<commit_msg>Update frontend to chrome r108801<commit_after># Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CHROME_SVN_BASE = 'http://src.chromium.org/svn/trunk/src/'
CHROME_SVN_REV = 108801
|
77e4fb7ef74bcfd58b548cca8ec9898eb936e7ef | conanfile.py | conanfile.py | from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
| from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
| Remove default option for `desa` | Remove default option for `desa`
| Python | bsd-3-clause | jason2506/esapp,jason2506/esapp | from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
Remove default option for `desa` | from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
| <commit_before>from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
<commit_msg>Remove default option for `desa`<commit_after> | from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
| from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
Remove default option for `desa`from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
| <commit_before>from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
default_options = (
'desa:build_tests=False'
)
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
<commit_msg>Remove default option for `desa`<commit_after>from conans import ConanFile, CMake
class EsappConan(ConanFile):
name = 'esapp'
version = '0.4.1'
url = 'https://github.com/jason2506/esapp'
license = 'BSD 3-Clause'
author = 'Chi-En Wu'
requires = 'desa/0.1.0@jason2506/testing'
settings = 'os', 'compiler', 'build_type', 'arch'
generators = 'cmake'
exports = (
'CMakeLists.txt',
'cmake/*.cmake',
'include/*.hpp'
)
def build(self):
cmake = CMake(self.settings)
args = []
args.append('-DENABLE_CONAN=%s' % self.options.enable_conan)
args.append('-DBUILD_TESTING=%s' % self.options.build_tests)
args.append('-DCMAKE_INSTALL_PREFIX="%s"' % self.package_folder)
self.run('cmake "%s" %s %s' % (
self.conanfile_directory,
cmake.command_line,
' '.join(args)
))
self.run('cmake --build .')
def package(self):
cmake = CMake(self.settings)
self.run('cmake --build . --target install %s' % cmake.build_config)
|
5b5f891b6ee714966eefed1adfbd366eb078210f | webpack_resolve.py | webpack_resolve.py | import json
import os
import wiki
PROJECT_ROOT_DIRECTORY = os.path.dirname(globals()['__file__'])
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(os.path.join(PROJECT_ROOT_DIRECTORY, 'webpack-extra-resolve.json'), 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
| import json
import os
import wiki
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
WEBPACK_RESOLVE_FILE = 'webpack-extra-resolve.json'
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(WEBPACK_RESOLVE_FILE, 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
| Remove unnecessary project root variable from webpack resolve script | Remove unnecessary project root variable from webpack resolve script
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | import json
import os
import wiki
PROJECT_ROOT_DIRECTORY = os.path.dirname(globals()['__file__'])
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(os.path.join(PROJECT_ROOT_DIRECTORY, 'webpack-extra-resolve.json'), 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
Remove unnecessary project root variable from webpack resolve script | import json
import os
import wiki
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
WEBPACK_RESOLVE_FILE = 'webpack-extra-resolve.json'
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(WEBPACK_RESOLVE_FILE, 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
| <commit_before>import json
import os
import wiki
PROJECT_ROOT_DIRECTORY = os.path.dirname(globals()['__file__'])
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(os.path.join(PROJECT_ROOT_DIRECTORY, 'webpack-extra-resolve.json'), 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
<commit_msg>Remove unnecessary project root variable from webpack resolve script<commit_after> | import json
import os
import wiki
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
WEBPACK_RESOLVE_FILE = 'webpack-extra-resolve.json'
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(WEBPACK_RESOLVE_FILE, 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
| import json
import os
import wiki
PROJECT_ROOT_DIRECTORY = os.path.dirname(globals()['__file__'])
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(os.path.join(PROJECT_ROOT_DIRECTORY, 'webpack-extra-resolve.json'), 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
Remove unnecessary project root variable from webpack resolve scriptimport json
import os
import wiki
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
WEBPACK_RESOLVE_FILE = 'webpack-extra-resolve.json'
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(WEBPACK_RESOLVE_FILE, 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
| <commit_before>import json
import os
import wiki
PROJECT_ROOT_DIRECTORY = os.path.dirname(globals()['__file__'])
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(os.path.join(PROJECT_ROOT_DIRECTORY, 'webpack-extra-resolve.json'), 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
<commit_msg>Remove unnecessary project root variable from webpack resolve script<commit_after>import json
import os
import wiki
DJANGO_WIKI_STATIC = os.path.join(os.path.dirname(wiki.__file__), 'static')
WEBPACK_RESOLVE_FILE = 'webpack-extra-resolve.json'
# This whole file is essentially just a big ugly hack.
# For webpack to properly build wiki static files it needs the absolute path to the wiki
# static folder. And since we're using virtualenvs there is no easy way to find this folder
# without running python code
def create_resolve_file():
# Write to json file which will be read by webpack
with open(WEBPACK_RESOLVE_FILE, 'w') as f:
f.write(json.dumps({
'paths': [
DJANGO_WIKI_STATIC
]
}))
if __name__ == '__main__':
# Only run if file is executed directly
create_resolve_file()
|
fdf05b0fa93c350d2cd030e451b0e26ed7393209 | tests/clientlib/validate_manifest_test.py | tests/clientlib/validate_manifest_test.py |
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj) |
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected | Add better tests for manifest json schema | Add better tests for manifest json schema
| Python | mit | chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,beni55/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Lucas-C/pre-commit,Lucas-C/pre-commit,dnephin/pre-commit,philipgian/pre-commit,dnephin/pre-commit,Teino1978-Corp/pre-commit,philipgian/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,dnephin/pre-commit,Teino1978-Corp/pre-commit,Lucas-C/pre-commit,barrysteyn/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,pre-commit/pre-commit,beni55/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,Lucas-C/pre-commit,Teino1978-Corp/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,beni55/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit-1,Lucas-C/pre-commit,beni55/pre-commit,barrysteyn/pre-commit,pre-commit/pre-commit,chriskuehl/pre-commit,chriskuehl/pre-commit-1,pre-commit/pre-commit,pre-commit/pre-commit,barrysteyn/pre-commit,chriskuehl/pre-commit,pre-commit/pre-commit,philipgian/pre-commit,dnephin/pre-commit,pre-commit/pre-commit |
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)Add better tests for manifest json schema |
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected | <commit_before>
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)<commit_msg>Add better tests for manifest json schema<commit_after> |
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected |
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)Add better tests for manifest json schema
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected | <commit_before>
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)<commit_msg>Add better tests for manifest json schema<commit_after>
import jsonschema
import jsonschema.exceptions
import pytest
from pre_commit.clientlib.validate_manifest import additional_manifest_check
from pre_commit.clientlib.validate_manifest import InvalidManifestError
from pre_commit.clientlib.validate_manifest import MANIFEST_JSON_SCHEMA
from pre_commit.clientlib.validate_manifest import run
def test_returns_0_for_valid_manifest():
assert run(['example_manifest.yaml']) == 0
def test_returns_0_for_our_manifest():
assert run([]) == 0
def test_returns_1_for_failing():
assert run(['tests/data/valid_yaml_but_invalid_manifest.yaml']) == 1
def test_additional_manifest_check_raises_for_bad_language():
with pytest.raises(InvalidManifestError):
additional_manifest_check([{'id': 'foo', 'language': 'not valid'}])
@pytest.mark.parametrize(('obj'), (
[{}],
[{'language': 'python'}],
[{'language': 'python>2.6'}],
))
def test_additional_manifest_check_is_ok_with_missing_language(obj):
additional_manifest_check(obj)
def is_valid_according_to_schema(obj, schema):
try:
jsonschema.validate(obj, schema)
return True
except jsonschema.exceptions.ValidationError:
return False
@pytest.mark.parametrize(('manifest_obj', 'expected'), (
([], False),
([{'id': 'a', 'name': 'b', 'entry': 'c'}], True),
(
[{
'id': 'a',
'name': 'b',
'entry': 'c',
'language': 'python',
'expected_return_value': 0,
}],
True,
),
))
def test_is_valid_according_to_schema(manifest_obj, expected):
ret = is_valid_according_to_schema(manifest_obj, MANIFEST_JSON_SCHEMA)
assert ret is expected |
8da5356b2a08679cbf61cff21db2068980866701 | scripts/master/factory/dart/channels.py | scripts/master/factory/dart/channels.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.5', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.6', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| Update stable channel builders to 1.6 branch | Update stable channel builders to 1.6 branch
Review URL: https://codereview.chromium.org/494783003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291643 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.5', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Update stable channel builders to 1.6 branch
Review URL: https://codereview.chromium.org/494783003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291643 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.6', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.5', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Update stable channel builders to 1.6 branch
Review URL: https://codereview.chromium.org/494783003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291643 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.6', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.5', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Update stable channel builders to 1.6 branch
Review URL: https://codereview.chromium.org/494783003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291643 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.6', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.5', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Update stable channel builders to 1.6 branch
Review URL: https://codereview.chromium.org/494783003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@291643 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.6', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
70ef413e0e43103877fc94cdfebd11002e6cbcbd | scripts/master/factory/dart/channels.py | scripts/master/factory/dart/channels.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| Update stable channel to 1.1 | Update stable channel to 1.1
Review URL: https://codereview.chromium.org/138273002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@244706 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Update stable channel to 1.1
Review URL: https://codereview.chromium.org/138273002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@244706 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Update stable channel to 1.1
Review URL: https://codereview.chromium.org/138273002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@244706 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Update stable channel to 1.1
Review URL: https://codereview.chromium.org/138273002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@244706 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| <commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.0', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Update stable channel to 1.1
Review URL: https://codereview.chromium.org/138273002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@244706 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
ba93ea71b87c95f4d52c85ae652496ebfb012e1f | pupa/importers/memberships.py | pupa/importers/memberships.py | from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
| from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
if 'unmatched_legislator' in membership:
spec['unmatched_legislator'] = membership['unmatched_legislator']
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
| Add unmatched_legislator to the spec | Add unmatched_legislator to the spec
| Python | bsd-3-clause | datamade/pupa,datamade/pupa,rshorey/pupa,mileswwatkins/pupa,rshorey/pupa,mileswwatkins/pupa,opencivicdata/pupa,influence-usa/pupa,influence-usa/pupa,opencivicdata/pupa | from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
Add unmatched_legislator to the spec | from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
if 'unmatched_legislator' in membership:
spec['unmatched_legislator'] = membership['unmatched_legislator']
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
| <commit_before>from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
<commit_msg>Add unmatched_legislator to the spec<commit_after> | from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
if 'unmatched_legislator' in membership:
spec['unmatched_legislator'] = membership['unmatched_legislator']
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
| from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
Add unmatched_legislator to the specfrom .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
if 'unmatched_legislator' in membership:
spec['unmatched_legislator'] = membership['unmatched_legislator']
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
| <commit_before>from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
<commit_msg>Add unmatched_legislator to the spec<commit_after>from .base import BaseImporter
class MembershipImporter(BaseImporter):
_type = 'membership'
def __init__(self, jurisdiction_id, person_importer, org_importer):
super(MembershipImporter, self).__init__(jurisdiction_id)
self.person_importer = person_importer
self.org_importer = org_importer
def get_db_spec(self, membership):
spec = {'organization_id': membership['organization_id'],
'person_id': membership['person_id'],
'role': membership['role'],
# if this is a historical role, only update historical roles
'end_date': membership.get('end_date')
}
if 'unmatched_legislator' in membership:
spec['unmatched_legislator'] = membership['unmatched_legislator']
return spec
def prepare_object_from_json(self, obj):
org_json_id = obj['organization_id']
obj['organization_id'] = self.org_importer.resolve_json_id(org_json_id)
person_json_id = obj['person_id']
obj['person_id'] = self.person_importer.resolve_json_id(person_json_id)
return obj
|
8a534a9927ac0050b3182243c2b8bbf59127549e | test/multiple_invocations_test.py | test/multiple_invocations_test.py | # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
| # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
| Test two flow invocations after each other | Test two flow invocations after each other
| Python | bsd-3-clause | lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow | # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
Test two flow invocations after each other | # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
| <commit_before># Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
<commit_msg>Test two flow invocations after each other<commit_after> | # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
| # Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
Test two flow invocations after each other# Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
| <commit_before># Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_immediate():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
<commit_msg>Test two flow invocations after each other<commit_after># Copyright (c) 2012 - 2014 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import mock_api
def test_multiple_invocations_same_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
def test_multiple_invocations_new_flow():
with mock_api.api(__file__) as api:
api.flow_job()
_params = (('password', '', 'Some password'), ('s1', '', 'Some string argument'))
api.job('j1', exec_time=0.01, max_fails=0, expect_invocations=2, expect_order=1, params=_params)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='a', s1='b')
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, report_interval=1) as ctrl1:
ctrl1.invoke('j1', password='something else', s1='asdasdasdasdad')
|
dd7682dd12333b9fec63a112a0484e9391937041 | tests/cputestdata/cpu-reformat.py | tests/cputestdata/cpu-reformat.py | #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("\n")
| #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("")
| Make sure generated files pass syntax-check | cputest: Make sure generated files pass syntax-check
The tests/cputestdata/cpu-parse.sh would produce JSON files with QEMU
replies which wouldn't pass syntax-check. Let's fix this by not emitting
an extra new line after reformatting the JSON file.
Signed-off-by: Jiri Denemark <62bdf77dc47919a4d59a91822129d14633cfca81@redhat.com>
Reviewed-by: Ján Tomko <4cab11cfb98d3c937327354a78eb07dbb6ee2bc6@redhat.com>
| Python | lgpl-2.1 | olafhering/libvirt,andreabolognani/libvirt,eskultety/libvirt,zippy2/libvirt,andreabolognani/libvirt,jfehlig/libvirt,zippy2/libvirt,jfehlig/libvirt,olafhering/libvirt,zippy2/libvirt,olafhering/libvirt,libvirt/libvirt,crobinso/libvirt,fabianfreyer/libvirt,jardasgit/libvirt,fabianfreyer/libvirt,crobinso/libvirt,nertpinx/libvirt,fabianfreyer/libvirt,libvirt/libvirt,eskultety/libvirt,zippy2/libvirt,jfehlig/libvirt,nertpinx/libvirt,jardasgit/libvirt,fabianfreyer/libvirt,nertpinx/libvirt,libvirt/libvirt,eskultety/libvirt,olafhering/libvirt,jardasgit/libvirt,jfehlig/libvirt,eskultety/libvirt,nertpinx/libvirt,andreabolognani/libvirt,fabianfreyer/libvirt,jardasgit/libvirt,nertpinx/libvirt,jardasgit/libvirt,crobinso/libvirt,libvirt/libvirt,andreabolognani/libvirt,andreabolognani/libvirt,eskultety/libvirt,crobinso/libvirt | #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("\n")
cputest: Make sure generated files pass syntax-check
The tests/cputestdata/cpu-parse.sh would produce JSON files with QEMU
replies which wouldn't pass syntax-check. Let's fix this by not emitting
an extra new line after reformatting the JSON file.
Signed-off-by: Jiri Denemark <62bdf77dc47919a4d59a91822129d14633cfca81@redhat.com>
Reviewed-by: Ján Tomko <4cab11cfb98d3c937327354a78eb07dbb6ee2bc6@redhat.com> | #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("")
| <commit_before>#!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("\n")
<commit_msg>cputest: Make sure generated files pass syntax-check
The tests/cputestdata/cpu-parse.sh would produce JSON files with QEMU
replies which wouldn't pass syntax-check. Let's fix this by not emitting
an extra new line after reformatting the JSON file.
Signed-off-by: Jiri Denemark <62bdf77dc47919a4d59a91822129d14633cfca81@redhat.com>
Reviewed-by: Ján Tomko <4cab11cfb98d3c937327354a78eb07dbb6ee2bc6@redhat.com><commit_after> | #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("")
| #!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("\n")
cputest: Make sure generated files pass syntax-check
The tests/cputestdata/cpu-parse.sh would produce JSON files with QEMU
replies which wouldn't pass syntax-check. Let's fix this by not emitting
an extra new line after reformatting the JSON file.
Signed-off-by: Jiri Denemark <62bdf77dc47919a4d59a91822129d14633cfca81@redhat.com>
Reviewed-by: Ján Tomko <4cab11cfb98d3c937327354a78eb07dbb6ee2bc6@redhat.com>#!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("")
| <commit_before>#!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("\n")
<commit_msg>cputest: Make sure generated files pass syntax-check
The tests/cputestdata/cpu-parse.sh would produce JSON files with QEMU
replies which wouldn't pass syntax-check. Let's fix this by not emitting
an extra new line after reformatting the JSON file.
Signed-off-by: Jiri Denemark <62bdf77dc47919a4d59a91822129d14633cfca81@redhat.com>
Reviewed-by: Ján Tomko <4cab11cfb98d3c937327354a78eb07dbb6ee2bc6@redhat.com><commit_after>#!/usr/bin/env python3
import sys
import json
dec = json.JSONDecoder()
data, pos = dec.raw_decode(sys.stdin.read())
json.dump(data, sys.stdout, indent=2, separators=(',', ': '))
print("")
|
fc7db2a55ad3f612ac6ef01cfa57ce03040708a5 | evelink/__init__.py | evelink/__init__.py | """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
| """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
| Remove parsing from public interface | Remove parsing from public interface
| Python | mit | zigdon/evelink,FashtimeDotCom/evelink,bastianh/evelink,ayust/evelink,Morloth1274/EVE-Online-POCO-manager | """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
Remove parsing from public interface | """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
| <commit_before>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
<commit_msg>Remove parsing from public interface<commit_after> | """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
| """EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
Remove parsing from public interface"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
| <commit_before>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
<commit_msg>Remove parsing from public interface<commit_after>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
46df020f5f349ac02c509e334ffd7e1f5970915b | detectem/exceptions.py | detectem/exceptions.py | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
| class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| Fix in tests for exception messages | Fix in tests for exception messages
| Python | mit | spectresearch/detectem | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
Fix in tests for exception messages | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| <commit_before>class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
<commit_msg>Fix in tests for exception messages<commit_after> | class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
Fix in tests for exception messagesclass DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
| <commit_before>class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(msg)
<commit_msg>Fix in tests for exception messages<commit_after>class DockerStartError(Exception):
pass
class NotNamedParameterFound(Exception):
pass
class SplashError(Exception):
def __init__(self, msg):
self.msg = 'Splash error: {}'.format(msg)
super().__init__(self.msg)
class NoPluginsError(Exception):
def __init__(self, msg):
self.msg = msg
super().__init__(self.msg)
|
0aaa546435a261a03e27fee53a3c5f334cca6b66 | spacy/tests/regression/test_issue768.py | spacy/tests/regression/test_issue768.py | # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import TOKENIZER_EXCEPTIONS, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
| # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = get_tokenizer_exceptions()
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
| Fix test after updating the French tokenizer stuff | Fix test after updating the French tokenizer stuff
| Python | mit | raphael0202/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,banglakit/spaCy,banglakit/spaCy,raphael0202/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,oroszgy/spaCy.hu,explosion/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,banglakit/spaCy,banglakit/spaCy,explosion/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,oroszgy/spaCy.hu,explosion/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,honnibal/spaCy,honnibal/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,spacy-io/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,recognai/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,aikramer2/spaCy,recognai/spaCy,raphael0202/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy | # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import TOKENIZER_EXCEPTIONS, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
Fix test after updating the French tokenizer stuff | # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = get_tokenizer_exceptions()
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import TOKENIZER_EXCEPTIONS, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
<commit_msg>Fix test after updating the French tokenizer stuff<commit_after> | # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = get_tokenizer_exceptions()
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
| # coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import TOKENIZER_EXCEPTIONS, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
Fix test after updating the French tokenizer stuff# coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = get_tokenizer_exceptions()
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
| <commit_before># coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import TOKENIZER_EXCEPTIONS, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
<commit_msg>Fix test after updating the French tokenizer stuff<commit_after># coding: utf-8
from __future__ import unicode_literals
from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA
import pytest
@pytest.fixture
def fr_tokenizer_w_infix():
SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)
# create new Language subclass to add to default infixes
class French(Language):
lang = 'fr'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'fr'
tokenizer_exceptions = get_tokenizer_exceptions()
stop_words = STOP_WORDS
infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]
return French.Defaults.create_tokenizer()
@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
"""Allow zero-width 'infix' token during the tokenization process."""
tokens = fr_tokenizer_w_infix(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
|
6f2db6743f431019a46a2b977cb17dd6f0622fbd | yolodex/urls.py | yolodex/urls.py | from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_detail'),
]
urlpatterns = patterns('', *entity_urls)
| from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_graph_json'),
]
urlpatterns = patterns('', *entity_urls)
| Fix name of entity graph url | Fix name of entity graph url | Python | mit | correctiv/django-yolodex,correctiv/django-yolodex,correctiv/django-yolodex | from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_detail'),
]
urlpatterns = patterns('', *entity_urls)
Fix name of entity graph url | from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_graph_json'),
]
urlpatterns = patterns('', *entity_urls)
| <commit_before>from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_detail'),
]
urlpatterns = patterns('', *entity_urls)
<commit_msg>Fix name of entity graph url <commit_after> | from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_graph_json'),
]
urlpatterns = patterns('', *entity_urls)
| from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_detail'),
]
urlpatterns = patterns('', *entity_urls)
Fix name of entity graph url from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_graph_json'),
]
urlpatterns = patterns('', *entity_urls)
| <commit_before>from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_detail'),
]
urlpatterns = patterns('', *entity_urls)
<commit_msg>Fix name of entity graph url <commit_after>from django.conf.urls import patterns, url, include
from django.utils.translation import ugettext as _
from .views import (
RealmView,
EntityDetailView,
EntityNetworkView,
)
entity_urls = [
url(r'^$', RealmView.as_view(), name='overview'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/$',
EntityDetailView.as_view(),
name='entity_detail'),
url(r'^(?P<type>[\w-]+)/(?P<slug>[\w-]+)/graph\.json$',
EntityNetworkView.as_view(),
name='entity_graph_json'),
]
urlpatterns = patterns('', *entity_urls)
|
9af4f3bc2ddc07e47f311ae51e20e3f99733ea35 | Orange/tests/test_regression.py | Orange/tests/test_regression.py | import unittest
import inspect
import pkgutil
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
| import unittest
import inspect
import pkgutil
import traceback
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
try:
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
except TypeError as err:
traceback.print_exc()
continue
| Handle TypeError while testing all regression learners | Handle TypeError while testing all regression learners
| Python | bsd-2-clause | qPCR4vir/orange3,marinkaz/orange3,cheral/orange3,kwikadi/orange3,kwikadi/orange3,kwikadi/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,marinkaz/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,qPCR4vir/orange3,cheral/orange3,cheral/orange3,qPCR4vir/orange3,marinkaz/orange3,kwikadi/orange3 | import unittest
import inspect
import pkgutil
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
Handle TypeError while testing all regression learners | import unittest
import inspect
import pkgutil
import traceback
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
try:
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
except TypeError as err:
traceback.print_exc()
continue
| <commit_before>import unittest
import inspect
import pkgutil
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
<commit_msg>Handle TypeError while testing all regression learners<commit_after> | import unittest
import inspect
import pkgutil
import traceback
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
try:
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
except TypeError as err:
traceback.print_exc()
continue
| import unittest
import inspect
import pkgutil
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
Handle TypeError while testing all regression learnersimport unittest
import inspect
import pkgutil
import traceback
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
try:
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
except TypeError as err:
traceback.print_exc()
continue
| <commit_before>import unittest
import inspect
import pkgutil
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
<commit_msg>Handle TypeError while testing all regression learners<commit_after>import unittest
import inspect
import pkgutil
import traceback
import Orange
from Orange.data import Table
from Orange.regression import Learner
class RegressionLearnersTest(unittest.TestCase):
def all_learners(self):
regression_modules = pkgutil.walk_packages(
path=Orange.regression.__path__,
prefix="Orange.regression.",
onerror=lambda x: None)
for importer, modname, ispkg in regression_modules:
try:
module = pkgutil.importlib.import_module(modname)
except ImportError:
continue
for name, class_ in inspect.getmembers(module, inspect.isclass):
if issubclass(class_, Learner) and 'base' not in class_.__module__:
yield class_
def test_adequacy_all_learners(self):
for learner in self.all_learners():
try:
learner = learner()
table = Table("iris")
self.assertRaises(ValueError, learner, table)
except TypeError as err:
traceback.print_exc()
continue
|
441da7a34058733c298c81dbd97a35fca6e538e0 | pgpdump/__main__.py | pgpdump/__main__.py | import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
| import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
| Remove cProfile inclusion, always read file as binary | Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <2591e5f46f28d303f9dc027d475a5c60d8dea17a@archlinux.org>
| Python | bsd-3-clause | toofishes/python-pgpdump | import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <2591e5f46f28d303f9dc027d475a5c60d8dea17a@archlinux.org> | import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
| <commit_before>import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
<commit_msg>Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <2591e5f46f28d303f9dc027d475a5c60d8dea17a@archlinux.org><commit_after> | import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
| import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <2591e5f46f28d303f9dc027d475a5c60d8dea17a@archlinux.org>import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
| <commit_before>import sys
import cProfile
from . import AsciiData, BinaryData
def parsefile(name):
with open(name) as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
cProfile.run('main()', 'main.profile')
<commit_msg>Remove cProfile inclusion, always read file as binary
Signed-off-by: Dan McGee <2591e5f46f28d303f9dc027d475a5c60d8dea17a@archlinux.org><commit_after>import sys
from . import AsciiData, BinaryData
def parsefile(name):
with open(name, 'rb') as infile:
if name.endswith('.asc'):
data = AsciiData(infile.read())
else:
data = BinaryData(infile.read())
counter = 0
for packet in data.packets():
counter += 1
print(packet)
print(counter)
def main():
for filename in sys.argv[1:]:
parsefile(filename)
if __name__ == '__main__':
main()
|
bbe86b97f38a3c99e8271a5f167223a965ef1ff0 | docs/conf.py | docs/conf.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
| Add link to source code in documentation | Add link to source code in documentation
| Python | mit | numberly/thingy | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
Add link to source code in documentation | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
<commit_msg>Add link to source code in documentation<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
Add link to source code in documentation#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
<commit_msg>Add link to source code in documentation<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
]
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Thingy'
copyright = 'numberly'
author = 'numberly'
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
|
cf2ae3c36c18ac00092736e076be0c4c09df6958 | sklearn_porter/language/go.py | sklearn_porter/language/go.py | # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if ({0} {1} {2}) {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
| # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if {0} {1} {2} {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
| Remove redundant parentheses around if conditions | feature/oop-api-refactoring: Remove redundant parentheses around if conditions
| Python | bsd-3-clause | nok/sklearn-porter | # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if ({0} {1} {2}) {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
feature/oop-api-refactoring: Remove redundant parentheses around if conditions | # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if {0} {1} {2} {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
| <commit_before># -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if ({0} {1} {2}) {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
<commit_msg>feature/oop-api-refactoring: Remove redundant parentheses around if conditions<commit_after> | # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if {0} {1} {2} {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
| # -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if ({0} {1} {2}) {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
feature/oop-api-refactoring: Remove redundant parentheses around if conditions# -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if {0} {1} {2} {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
| <commit_before># -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if ({0} {1} {2}) {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
<commit_msg>feature/oop-api-refactoring: Remove redundant parentheses around if conditions<commit_after># -*- coding: utf-8 -*-
from os.path import sep
KEY = 'go'
LABEL = 'Go'
DEPENDENCIES = ['go']
TEMP_DIR = 'go'
SUFFIX = 'go'
# go build -o tmp/estimator tmp/estimator.go
CMD_COMPILE = 'go build -o {dest_dir}' + sep + '{dest_file} {src_dir}' + sep + '{src_file}'
# tmp/estimator <args>
CMD_EXECUTE = '{dest_dir}' + sep + '{dest_file}'
TEMPLATES = {
# if/else condition:
'if': 'if {0} {1} {2} {{',
'else': '} else {',
'endif': '}',
# Basics:
'indent': '\t',
'join': '',
'type': '{0}',
# Arrays:
'in_brackets': '{{{0}}}',
'arr[]': '{name} := []{type} {{{values}}}', # ages := []int {1, 2}
'arr[][]': '{name} := [][]{type} {{{values}}}',
# Primitive data types:
'int': 'int',
'double': 'float64'
}
|
8b87a55a03422cc499b2f7cc168bcc0c15c0ae42 | mycli/clibuffer.py | mycli/clibuffer.py | from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
| from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
text.endswith('\\g') or # Ended with \g
text.endswith('\\G') or # Ended with \G
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
| Make \G or \g to end a query. | Make \G or \g to end a query.
| Python | bsd-3-clause | j-bennet/mycli,jinstrive/mycli,mdsrosa/mycli,evook/mycli,shoma/mycli,chenpingzhao/mycli,mdsrosa/mycli,D-e-e-m-o/mycli,evook/mycli,jinstrive/mycli,martijnengler/mycli,webwlsong/mycli,webwlsong/mycli,oguzy/mycli,suzukaze/mycli,oguzy/mycli,danieljwest/mycli,MnO2/rediscli,danieljwest/mycli,martijnengler/mycli,D-e-e-m-o/mycli,j-bennet/mycli,MnO2/rediscli,chenpingzhao/mycli,ZuoGuocai/mycli,suzukaze/mycli,ZuoGuocai/mycli,shoma/mycli | from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
Make \G or \g to end a query. | from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
text.endswith('\\g') or # Ended with \g
text.endswith('\\G') or # Ended with \G
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
| <commit_before>from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
<commit_msg>Make \G or \g to end a query.<commit_after> | from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
text.endswith('\\g') or # Ended with \g
text.endswith('\\G') or # Ended with \G
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
| from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
Make \G or \g to end a query.from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
text.endswith('\\g') or # Ended with \g
text.endswith('\\G') or # Ended with \G
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
| <commit_before>from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
<commit_msg>Make \G or \g to end a query.<commit_after>from prompt_toolkit.buffer import Buffer
from prompt_toolkit.filters import Condition
class CLIBuffer(Buffer):
def __init__(self, always_multiline, *args, **kwargs):
self.always_multiline = always_multiline
@Condition
def is_multiline():
doc = self.document
return self.always_multiline and not _multiline_exception(doc.text)
super(self.__class__, self).__init__(*args, is_multiline=is_multiline,
tempfile_suffix='.sql', **kwargs)
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith('\\fs'):
return orig.endswith('\n')
return (text.startswith('\\') or # Special Command
text.endswith(';') or # Ended with a semi-colon
text.endswith('\\g') or # Ended with \g
text.endswith('\\G') or # Ended with \G
(text == 'exit') or # Exit doesn't need semi-colon
(text == 'quit') or # Quit doesn't need semi-colon
(text == ':q') or # To all the vim fans out there
(text == '') # Just a plain enter without any text
)
|
f268b5e62ca8bbf1712225d4c8d6d38580f38fba | quantum/__init__.py | quantum/__init__.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| Make the quantum top-level a namespace package. | Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d
| Python | apache-2.0 | netscaler/neutron,mahak/neutron,miyakz1192/neutron,takeshineshiro/neutron,NeCTAR-RC/neutron,apporc/neutron,klmitch/neutron,JioCloud/neutron,skyddv/neutron,swdream/neutron,rossella/neutron,CiscoSystems/quantum,CiscoSystems/QL3Proto,aristanetworks/arista-ovs-quantum,psiwczak/quantum,eayunstack/neutron,liqin75/vse-vpnaas-plugin,armando-migliaccio/neutron,yamt/neutron,silenci/neutron,CiscoSystems/QL3Proto,tpaszkowski/quantum,klmitch/neutron,yamt/neutron,psiwczak/quantum,xchenum/quantum,tpaszkowski/quantum,watonyweng/neutron,sajuptpm/neutron-ipam,cloudbase/neutron-virtualbox,mattt416/neutron,FreescaleSemiconductor/quantum,eonpatapon/neutron,CiscoSystems/QL3Proto,vveerava/Openstack,ntt-sic/neutron,psiwczak/quantum,rossella/neutron,gkotton/neutron,eonpatapon/neutron,savi-dev/quantum,jerryz1982/neutron,aristanetworks/neutron,xchenum/quantum-bug,citrix-openstack-build/neutron,MaximNevrov/neutron,leeseuljeong/leeseulstack_neutron,psiwczak/quantum,SamYaple/neutron,yuewko/neutron,sileht/deb-openstack-quantum,liqin75/vse-vpnaas-plugin,javaos74/neutron,huntxu/neutron,suneeth51/neutron,wolverineav/neutron,skyddv/neutron,zhhf/charging,noironetworks/neutron,Metaswitch/calico-neutron,javaos74/neutron,armando-migliaccio/neutron,liqin75/vse-vpnaas-plugin,yanheven/neutron,wenhuizhang/neutron,yamahata/neutron,paninetworks/neutron,openstack/neutron,vijayendrabvs/hap,openstack/neutron,jacknjzhou/neutron,kaiweifan/vse-lbaas-plugin-poc,leeseuljeong/leeseulstack_neutron,projectcalico/calico-neutron,kaiweifan/vse-lbaas-plugin-poc,Juniper/neutron,aristanetworks/neutron,waltBB/neutron_read,barnsnake351/neutron,ntt-sic/neutron,pnavarro/neutron,zhhf/charging,JianyuWang/neutron,mandeepdhami/neutron,bigswitch/neutron,Comcast/neutron,citrix-openstack-build/neutron,vivekanand1101/neutron,jumpojoy/neutron,sileht/deb-openstack-quantum,Brocade-OpenSource/OpenStack-DNRM-Neutron,beagles/neutron_hacking,infobloxopen/neutron,dhanunjaya/neutron,sebrandon1/neutron,dims/neutron,cisco-ope
nstack/neutron,waltBB/neutron_read,mahak/neutron,virtualopensystems/neutron,yamahata/neutron,Brocade-OpenSource/OpenStack-DNRM-Neutron,yuewko/neutron,leeseuljeong/leeseulstack_neutron,ykaneko/quantum,vijayendrabvs/hap,ntt-sic/neutron,beagles/neutron_hacking,suneeth51/neutron,magic0704/neutron,beagles/neutron_hacking,ykaneko/neutron,xchenum/quantum,yamahata/tacker,blueboxgroup/neutron,gkotton/neutron,CiscoSystems/neutron,ykaneko/quantum,mmnelemane/neutron,jumpojoy/neutron,vveerava/Openstack,chitr/neutron,vveerava/Openstack,bgxavier/neutron,adelina-t/neutron,savi-dev/quantum,SamYaple/neutron,CiscoSystems/vespa,leeseulstack/openstack,JianyuWang/neutron,asgard-lab/neutron,vijayendrabvs/ssl-neutron,yanheven/neutron,rdo-management/neutron,Juniper/contrail-dev-neutron,neoareslinux/neutron,cernops/neutron,JioCloud/neutron,Juniper/neutron,leeseulstack/openstack,sajuptpm/neutron-ipam,vijayendrabvs/hap,CiscoSystems/quantum,rickerc/neutron_audit,sebrandon1/neutron,Juniper/contrail-dev-neutron,ykaneko/quantum,ykaneko/neutron,cloudbase/neutron,gkotton/neutron,cloudbase/neutron-virtualbox,adelina-t/neutron,leeseulstack/openstack,xchenum/quantum,projectcalico/calico-neutron,gopal1cloud/neutron,yamt/neutron,virtualopensystems/neutron,kaiweifan/neutron,barnsnake351/neutron,sasukeh/neutron,mandeepdhami/neutron,zhhf/charging,rdo-management/neutron,silenci/neutron,tpaszkowski/quantum,Juniper/neutron,infobloxopen/neutron,cloudbase/neutron,cernops/neutron,sajuptpm/neutron-ipam,CiscoSystems/neutron,sileht/deb-openstack-quantum,vivekanand1101/neutron,paninetworks/neutron,CiscoSystems/vespa,oeeagle/quantum,citrix-openstack-build/neutron,kaiweifan/vse-lbaas-plugin-poc,igor-toga/local-snat,CiscoSystems/vespa,vijayendrabvs/ssl-neutron,kaiweifan/neutron,alexandrucoman/vbox-neutron-agent,virtualopensystems/neutron,chitr/neutron,xchenum/quantum-bug,noironetworks/neutron,rickerc/neutron_audit,openstack/neutron,blueboxgroup/neutron,yamt/neutron,FreescaleSemiconductor/quantum,takeshineshiro/neutron,d
ims/neutron,igor-toga/local-snat,mahak/neutron,ykaneko/quantum,xchenum/quantum-bug,glove747/liberty-neutron,SmartInfrastructures/neutron,Juniper/contrail-dev-neutron,NeCTAR-RC/neutron,aristanetworks/arista-ovs-quantum,jerryz1982/neutron,armando-migliaccio/neutron,aristanetworks/arista-ovs-quantum,FreescaleSemiconductor/quantum,neoareslinux/neutron,antonioUnina/neutron,tpaszkowski/quantum,redhat-openstack/neutron,dhanunjaya/neutron,rossella/neutron,vijayendrabvs/ssl-neutron,swdream/neutron,eayunstack/neutron,vbannai/neutron,netscaler/neutron,SmartInfrastructures/neutron,kaiweifan/vse-lbaas-plugin-poc,wolverineav/neutron,liqin75/vse-vpnaas-plugin,savi-dev/quantum,savi-dev/quantum,magic0704/neutron,Brocade-OpenSource/OpenStack-DNRM-Neutron,Stavitsky/neutron,apporc/neutron,kaiweifan/neutron,cisco-openstack/neutron,rossella/neutron,sasukeh/neutron,mattt416/neutron,pnavarro/neutron,shahbazn/neutron,CiscoSystems/quantum,vbannai/neutron,FreescaleSemiconductor/quantum,oeeagle/quantum,yamahata/neutron,gopal1cloud/neutron,ykaneko/neutron,antonioUnina/neutron,Metaswitch/calico-neutron,redhat-openstack/neutron,wenhuizhang/neutron,Comcast/neutron,watonyweng/neutron,rickerc/neutron_audit,CiscoSystems/neutron,alexandrucoman/vbox-neutron-agent,aristanetworks/arista-ovs-quantum,bigswitch/neutron,armando-migliaccio/neutron,blueboxgroup/neutron,vbannai/neutron,yamahata/tacker,Stavitsky/neutron,Comcast/neutron,jacknjzhou/neutron,glove747/liberty-neutron,netscaler/neutron,huntxu/neutron,asgard-lab/neutron,bgxavier/neutron,mmnelemane/neutron,MaximNevrov/neutron,miyakz1192/neutron,yamahata/tacker,shahbazn/neutron | Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before><commit_msg>Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d<commit_after> | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before><commit_msg>Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| |
b02d7e1e288eeaf38cfc299765f4c940bad5ea36 | examples/add_misc_features.py | examples/add_misc_features.py | #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
| #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
| Update example with correct form, and with comment. | Update example with correct form, and with comment.
| Python | mit | pyconll/pyconll,pyconll/pyconll | #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
Update example with correct form, and with comment. | #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
| <commit_before>#!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
<commit_msg>Update example with correct form, and with comment.<commit_after> | #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
| #!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
Update example with correct form, and with comment.#!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
| <commit_before>#!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
token.misc['Polysemous'] = True
# Print to standard out which can then be redirected.
print(corpus.conll())
<commit_msg>Update example with correct form, and with comment.<commit_after>#!/usr/bin/env python
#
# Add a singleton feature to the misc column of all tokens of a certain form.
#
# Format
# add_misc_features.py filename > transform.conll
#
import argparse
import pyconll
parser = argparse.ArgumentParser()
parser.add_argument('filename', help='The name of the file to transform')
args = parser.parse_args()
corpus = pyconll.load_from_file(args.filename)
for sentence in corpus:
for token in sentence:
if token.lemma == 'dog' and token.upos == 'VERB':
# Note: This means that 'Polysemous' will be present as a singleton
# in the token line. To remove 'Polysemous' from the token's
# features, call del token.misc['Polysemous']
token.misc['Polysemous'] = None
# Print to standard out which can then be redirected.
print(corpus.conll())
|
94e822e67f3550710347f563ae1d32e301d2e08b | raven/processors.py | raven/processors.py | """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
if 'password' in k or 'secret' in k:
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return data | """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
lower_k = k.lower()
if 'password' in lower_k or 'secret' in lower_k:
# store mask as a fixed length for security
frame['vars'][k] = '*' * 16
return data
| Handle var names that are uppercase | Handle var names that are uppercase
| Python | bsd-3-clause | smarkets/raven-python,patrys/opbeat_python,ronaldevers/raven-python,ronaldevers/raven-python,recht/raven-python,danriti/raven-python,lepture/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,akalipetis/raven-python,johansteffner/raven-python,beniwohli/apm-agent-python,dbravender/raven-python,patrys/opbeat_python,jmagnusson/raven-python,nikolas/raven-python,inspirehep/raven-python,lepture/raven-python,getsentry/raven-python,alex/raven,hzy/raven-python,jbarbuto/raven-python,johansteffner/raven-python,smarkets/raven-python,tarkatronic/opbeat_python,ticosax/opbeat_python,Photonomie/raven-python,openlabs/raven,jbarbuto/raven-python,akheron/raven-python,jmp0xf/raven-python,hzy/raven-python,tarkatronic/opbeat_python,collective/mr.poe,tarkatronic/opbeat_python,getsentry/raven-python,danriti/raven-python,dirtycoder/opbeat_python,icereval/raven-python,beniwohli/apm-agent-python,jmagnusson/raven-python,nikolas/raven-python,beniwohli/apm-agent-python,danriti/raven-python,akalipetis/raven-python,dbravender/raven-python,arthurlogilab/raven-python,dbravender/raven-python,icereval/raven-python,ticosax/opbeat_python,beniwohli/apm-agent-python,jmp0xf/raven-python,daikeren/opbeat_python,getsentry/raven-python,akheron/raven-python,recht/raven-python,percipient/raven-python,johansteffner/raven-python,daikeren/opbeat_python,nikolas/raven-python,dirtycoder/opbeat_python,icereval/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,ewdurbin/raven-python,arthurlogilab/raven-python,inspirehep/raven-python,jbarbuto/raven-python,inspirehep/raven-python,hzy/raven-python,akheron/raven-python,daikeren/opbeat_python,percipient/raven-python,patrys/opbeat_python,arthurlogilab/raven-python,dirtycoder/opbeat_python,smarkets/raven-python,jmagnusson/raven-python,patrys/opbeat_python,inspirehep/raven-python,jbarbuto/raven-python,Photonomie/raven-python,smarkets/raven-python,ticosax/opbeat_python,recht/raven-python,akalipetis/raven-python,Goldmun
d-Wyldebeast-Wunderliebe/raven-python,jmp0xf/raven-python,ewdurbin/raven-python,icereval/raven-python,arthurlogilab/raven-python,someonehan/raven-python,percipient/raven-python,someonehan/raven-python,lepture/raven-python,lopter/raven-python-old,ewdurbin/raven-python,Photonomie/raven-python,ronaldevers/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,someonehan/raven-python | """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
if 'password' in k or 'secret' in k:
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return dataHandle var names that are uppercase | """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
lower_k = k.lower()
if 'password' in lower_k or 'secret' in lower_k:
# store mask as a fixed length for security
frame['vars'][k] = '*' * 16
return data
| <commit_before>"""
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
if 'password' in k or 'secret' in k:
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return data<commit_msg>Handle var names that are uppercase<commit_after> | """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
lower_k = k.lower()
if 'password' in lower_k or 'secret' in lower_k:
# store mask as a fixed length for security
frame['vars'][k] = '*' * 16
return data
| """
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
if 'password' in k or 'secret' in k:
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return dataHandle var names that are uppercase"""
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
lower_k = k.lower()
if 'password' in lower_k or 'secret' in lower_k:
# store mask as a fixed length for security
frame['vars'][k] = '*' * 16
return data
| <commit_before>"""
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
if 'password' in k or 'secret' in k:
# store mask as a fixed length for security
frame['vars'][k] = '*'*16
return data<commit_msg>Handle var names that are uppercase<commit_after>"""
raven.core.processors
~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
class Processor(object):
def __init__(self, client):
self.client = client
def process(self, data, **kwargs):
resp = self.get_data(data)
if resp:
data = resp
return data
class SantizePasswordsProcessor(Processor):
"""
Asterisk out passwords from password fields in frames.
"""
def process(self, data, **kwargs):
if 'sentry.interfaces.Stacktrace' in data:
if 'frames' in data['sentry.interfaces.Stacktrace']:
for frame in data['sentry.interfaces.Stacktrace']['frames']:
if 'vars' in frame:
for k, v in frame['vars'].iteritems():
lower_k = k.lower()
if 'password' in lower_k or 'secret' in lower_k:
# store mask as a fixed length for security
frame['vars'][k] = '*' * 16
return data
|
26a53141e844c11e7ff904af2620b7ee125b011d | diana/tracking.py | diana/tracking.py | from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
| from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
| Add a convenience method to get the player ship | Add a convenience method to get the player ship
| Python | mit | prophile/libdiana | from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
Add a convenience method to get the player ship | from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
| <commit_before>from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
<commit_msg>Add a convenience method to get the player ship<commit_after> | from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
| from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
Add a convenience method to get the player shipfrom . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
| <commit_before>from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
<commit_msg>Add a convenience method to get the player ship<commit_after>from . import packet as p
class Tracker:
def __init__(self):
self.objects = {}
@property
def player_ship(self):
for _obj in self.objects.values():
if _obj['type'] == p.ObjectType.player_vessel:
return _obj
return {}
def update_object(self, record):
try:
oid = record['object']
except KeyError:
return
else:
self.objects.setdefault(oid, {}).update(record)
def remove_object(self, oid):
try:
del self.objects[oid]
except KeyError:
pass
def rx(self, packet):
if isinstance(packet, p.ObjectUpdatePacket):
for record in packet.records:
self.update_object(record)
elif isinstance(packet, p.DestroyObjectPacket):
self.remove_object(packet.object)
|
7ca4b1652dc5fa35bbacc2d587addacc9ce9da83 | fuzzinator/call_job.py | fuzzinator/call_job.py | # Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test)
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
| # Copyright (c) 2016-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test if isinstance(test, bytes) else str(test).encode('utf-8'))
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
| Prepare test hashing for complex types. | Prepare test hashing for complex types.
| Python | bsd-3-clause | renatahodovan/fuzzinator,renatahodovan/fuzzinator,renatahodovan/fuzzinator,renatahodovan/fuzzinator,akosthekiss/fuzzinator,akosthekiss/fuzzinator,akosthekiss/fuzzinator,akosthekiss/fuzzinator | # Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test)
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
Prepare test hashing for complex types. | # Copyright (c) 2016-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test if isinstance(test, bytes) else str(test).encode('utf-8'))
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
| <commit_before># Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test)
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
<commit_msg>Prepare test hashing for complex types.<commit_after> | # Copyright (c) 2016-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test if isinstance(test, bytes) else str(test).encode('utf-8'))
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
| # Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test)
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
Prepare test hashing for complex types.# Copyright (c) 2016-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test if isinstance(test, bytes) else str(test).encode('utf-8'))
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
| <commit_before># Copyright (c) 2016 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test)
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
<commit_msg>Prepare test hashing for complex types.<commit_after># Copyright (c) 2016-2018 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import hashlib
class CallJob(object):
"""
Base class for jobs that call SUTs and can find new issues.
"""
def __init__(self, config, db, listener):
self.config = config
self.db = db
self.listener = listener
# expects self.sut_section and self.fuzzer_name to be set by descendants
def add_issue(self, issue, new_issues):
test = issue['test']
# Save issue details.
issue.update(dict(sut=self.sut_section,
fuzzer=self.fuzzer_name,
test=test,
reduced=False,
reported=False))
# Generate default hash ID for the test if does not exist.
if 'id' not in issue or not issue['id']:
hasher = hashlib.md5()
hasher.update(test if isinstance(test, bytes) else str(test).encode('utf-8'))
issue['id'] = hasher.hexdigest()
# Save new issues.
if self.db.add_issue(issue):
new_issues.append(issue)
self.listener.new_issue(issue=issue)
|
73a4aca6e9c0c4c9ef53e498319bf754c6bb8edb | rippl/rippl/urls.py | rippl/rippl/urls.py | """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
| """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
| Fix line length to pass CI | Fix line length to pass CI | Python | mit | gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl | """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
Fix line length to pass CI | """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
| <commit_before>"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
<commit_msg>Fix line length to pass CI<commit_after> | """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
| """rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
Fix line length to pass CI"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
| <commit_before>"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
<commit_msg>Fix line length to pass CI<commit_after>"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
|
089b1c3ab27bb5d3c343d7787a357c49ff56bfc8 | docs/conf.py | docs/conf.py | import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
| import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
| Fix a badly indented line. (PEP8 E121) | Fix a badly indented line. (PEP8 E121)
| Python | bsd-3-clause | lamby/django-slack | import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Fix a badly indented line. (PEP8 E121) | import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
| <commit_before>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Fix a badly indented line. (PEP8 E121)<commit_after> | import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
| import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Fix a badly indented line. (PEP8 E121)import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
| <commit_before>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Fix a badly indented line. (PEP8 E121)<commit_after>import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from django.conf import settings
settings.configure()
project = 'django-slack'
version = ''
release = ''
copyright = '2014, 2015 Chris Lamb'
author = 'lamby'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
html_theme = "sphinx_rtd_theme"
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, author, 'manual', True),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
1b9d453f6fe0d2128849f98922f082d6ccfbee69 | channelfilter.py | channelfilter.py | #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
| #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param project: Get all channels to spam for given projects
:type project: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
| Fix channel filtering to work properly | Fix channel filtering to work properly
| Python | mit | wikimedia/labs-tools-wikibugs2,wikimedia/labs-tools-wikibugs2 | #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
Fix channel filtering to work properly | #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param project: Get all channels to spam for given projects
:type project: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
| <commit_before>#!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
<commit_msg>Fix channel filtering to work properly<commit_after> | #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param project: Get all channels to spam for given projects
:type project: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
| #!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
Fix channel filtering to work properly#!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param project: Get all channels to spam for given projects
:type project: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
| <commit_before>#!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, project):
"""
:param project: Get all channels to spam for the given project
:type project: basestring
"""
channels = set()
for channel in self.config['channels']:
if project in self.config['channels'][channel]:
channels.add(channel)
continue
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
print(channels)
return channels
<commit_msg>Fix channel filtering to work properly<commit_after>#!/usr/bin/env python
import os
import yaml
class ChannelFilter(object):
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), 'channels.yaml')
with open(path) as f:
self.config = yaml.load(f)
print(self.config)
@property
def firehose_channel(self):
return self.config['firehose-channel']
@property
def default_channel(self):
return self.config['default-channel']
def all_channels(self):
channels = [self.default_channel, self.firehose_channel] + list(self.config['channels'])
return list(set(channels))
def channels_for(self, projects):
"""
:param project: Get all channels to spam for given projects
:type project: list
"""
channels = set()
for channel in self.config['channels']:
for project in projects:
if project in self.config['channels'][channel]:
channels.add(channel)
break
if not channels:
channels.add(self.default_channel)
channels.add(self.firehose_channel)
return channels
|
8ddc1e40dd505aeb1b28d05238fa198eb3260f94 | fireplace/cards/tgt/hunter.py | fireplace/cards/tgt/hunter.py | from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
##
# Spells
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
| from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
# Stablemaster
class AT_057:
play = Buff(TARGET, "AT_057o")
# Brave Archer
class AT_059:
inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2)
##
# Spells
# Powershot
class AT_056:
play = Hit(TARGET | TARGET_ADJACENT, 2)
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
##
# Secrets
# Bear Trap
class AT_060:
events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
| Implement more TGT Hunter cards | Implement more TGT Hunter cards
| Python | agpl-3.0 | smallnamespace/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,NightKev/fireplace,liujimj/fireplace,jleclanche/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,liujimj/fireplace,amw2104/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace | from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
##
# Spells
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
Implement more TGT Hunter cards | from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
# Stablemaster
class AT_057:
play = Buff(TARGET, "AT_057o")
# Brave Archer
class AT_059:
inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2)
##
# Spells
# Powershot
class AT_056:
play = Hit(TARGET | TARGET_ADJACENT, 2)
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
##
# Secrets
# Bear Trap
class AT_060:
events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
| <commit_before>from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
##
# Spells
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
<commit_msg>Implement more TGT Hunter cards<commit_after> | from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
# Stablemaster
class AT_057:
play = Buff(TARGET, "AT_057o")
# Brave Archer
class AT_059:
inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2)
##
# Spells
# Powershot
class AT_056:
play = Hit(TARGET | TARGET_ADJACENT, 2)
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
##
# Secrets
# Bear Trap
class AT_060:
events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
| from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
##
# Spells
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
Implement more TGT Hunter cardsfrom ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
# Stablemaster
class AT_057:
play = Buff(TARGET, "AT_057o")
# Brave Archer
class AT_059:
inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2)
##
# Spells
# Powershot
class AT_056:
play = Hit(TARGET | TARGET_ADJACENT, 2)
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
##
# Secrets
# Bear Trap
class AT_060:
events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
| <commit_before>from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
##
# Spells
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
<commit_msg>Implement more TGT Hunter cards<commit_after>from ..utils import *
##
# Minions
# Ram Wrangler
class AT_010:
play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast())
# Stablemaster
class AT_057:
play = Buff(TARGET, "AT_057o")
# Brave Archer
class AT_059:
inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2)
##
# Spells
# Powershot
class AT_056:
play = Hit(TARGET | TARGET_ADJACENT, 2)
# Lock and Load
class AT_061:
play = Buff(FRIENDLY_HERO, "AT_061e")
class AT_061e:
events = OWN_SPELL_PLAY.on(
Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER))
)
# Ball of Spiders
class AT_062:
play = Summon(CONTROLLER, "FP1_011") * 3
##
# Secrets
# Bear Trap
class AT_060:
events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
|
f17da7465592eede8be261ed3f997881f596ef18 | examples/helloworld/helloworld.py | examples/helloworld/helloworld.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=512, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=128, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
| Tweak example image conversion settings. | Tweak example image conversion settings.
| Python | bsd-3-clause | uekeueke/deepzoom.py,edsilv/deepzoom.py,uekeueke/deepzoom.py,edsilv/deepzoom.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=512, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
Tweak example image conversion settings. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=128, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=512, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
<commit_msg>Tweak example image conversion settings.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=128, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=512, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
Tweak example image conversion settings.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=128, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=512, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
<commit_msg>Tweak example image conversion settings.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import deepzoom
# Specify your source image
SOURCE = "helloworld.jpg"
# Create Deep Zoom Image creator with weird parameters
creator = deepzoom.ImageCreator(tile_size=128, tile_overlap=2, tile_format="png",
image_quality=0.8, resize_filter="bicubic")
# Create Deep Zoom image pyramid from source
creator.create(SOURCE, "helloworld.dzi")
|
26e0d89e5178fb05b95f56cbef58ac37bfa6f1d9 | camera_opencv.py | camera_opencv.py | import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
| import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
| Use OPENCV_CAMERA_SOURCE environment variable to set source | Use OPENCV_CAMERA_SOURCE environment variable to set source
| Python | mit | miguelgrinberg/flask-video-streaming,miguelgrinberg/flask-video-streaming | import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
Use OPENCV_CAMERA_SOURCE environment variable to set source | import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
| <commit_before>import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
<commit_msg>Use OPENCV_CAMERA_SOURCE environment variable to set source<commit_after> | import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
| import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
Use OPENCV_CAMERA_SOURCE environment variable to set sourceimport os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
| <commit_before>import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
<commit_msg>Use OPENCV_CAMERA_SOURCE environment variable to set source<commit_after>import os
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
def __init__(self):
if os.environ.get('OPENCV_CAMERA_SOURCE'):
Camera.set_video_source(int(os.environ['OPENCV_CAMERA_SOURCE']))
super(Camera, self).__init__()
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img)[1].tobytes()
|
b41ac0e6a5f4518b261b9106c2fbce7c55b3b9a5 | python/test/test_survey_submit.py | python/test/test_survey_submit.py | #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
data = 'data'
client = EpiDBClient()
res = client.survey_submit(data)
print res
| #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
key = '0123456789abcdef0123456789abcdef01234567'
data = 'data'
client = EpiDBClient(key)
res = client.survey_submit(data)
print res
| Update example to use api-key. | [python] Update example to use api-key.
| Python | agpl-3.0 | ISIFoundation/influenzanet-epidb-client | #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
data = 'data'
client = EpiDBClient()
res = client.survey_submit(data)
print res
[python] Update example to use api-key. | #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
key = '0123456789abcdef0123456789abcdef01234567'
data = 'data'
client = EpiDBClient(key)
res = client.survey_submit(data)
print res
| <commit_before>#!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
data = 'data'
client = EpiDBClient()
res = client.survey_submit(data)
print res
<commit_msg>[python] Update example to use api-key.<commit_after> | #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
key = '0123456789abcdef0123456789abcdef01234567'
data = 'data'
client = EpiDBClient(key)
res = client.survey_submit(data)
print res
| #!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
data = 'data'
client = EpiDBClient()
res = client.survey_submit(data)
print res
[python] Update example to use api-key.#!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
key = '0123456789abcdef0123456789abcdef01234567'
data = 'data'
client = EpiDBClient(key)
res = client.survey_submit(data)
print res
| <commit_before>#!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
data = 'data'
client = EpiDBClient()
res = client.survey_submit(data)
print res
<commit_msg>[python] Update example to use api-key.<commit_after>#!/usr/bin/env python
import sys
sys.path += ['../']
from epidb.client import EpiDBClient
key = '0123456789abcdef0123456789abcdef01234567'
data = 'data'
client = EpiDBClient(key)
res = client.survey_submit(data)
print res
|
8dc4245db8e64fd5024e1d6fe0bc1b230b2dce85 | server/cg/manage.py | server/cg/manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings.dev")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Set default settings to dev | Set default settings to dev
| Python | mit | pramodliv1/conceptgrapher,pramodliv1/conceptgrapher,pramodliv1/conceptgrapher | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Set default settings to dev | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings.dev")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Set default settings to dev<commit_after> | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings.dev")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
Set default settings to dev#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings.dev")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| <commit_before>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
<commit_msg>Set default settings to dev<commit_after>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cg.settings.dev")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
f034f69a24cd2a4048e23c54c73badd0674eb1aa | views/base.py | views/base.py | from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
eventlist = Event.query.filter(and_(Event.published is True, Event.start_time < (datetime.now() + timedelta(seconds=1)))).order_by(Event.start_time.desc()).all()
return render_template("base/events.j2", events=eventlist)
| from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
next_event = Event.query.filter(and_(Event.published == True, Event.start_time > datetime.now())).order_by(Event.start_time).first()
eventlist = Event.query.filter(and_(Event.published == True, Event.start_time < datetime.now())).order_by(Event.start_time.desc()).all()
if next_event:
eventlist.insert(0, next_event)
return render_template("base/events.j2", events=eventlist)
| Fix the events page, so that upcoming event shows up. | Fix the events page, so that upcoming event shows up.
| Python | mit | saseumn/website,saseumn/website | from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
eventlist = Event.query.filter(and_(Event.published is True, Event.start_time < (datetime.now() + timedelta(seconds=1)))).order_by(Event.start_time.desc()).all()
return render_template("base/events.j2", events=eventlist)
Fix the events page, so that upcoming event shows up. | from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
next_event = Event.query.filter(and_(Event.published == True, Event.start_time > datetime.now())).order_by(Event.start_time).first()
eventlist = Event.query.filter(and_(Event.published == True, Event.start_time < datetime.now())).order_by(Event.start_time.desc()).all()
if next_event:
eventlist.insert(0, next_event)
return render_template("base/events.j2", events=eventlist)
| <commit_before>from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
eventlist = Event.query.filter(and_(Event.published is True, Event.start_time < (datetime.now() + timedelta(seconds=1)))).order_by(Event.start_time.desc()).all()
return render_template("base/events.j2", events=eventlist)
<commit_msg>Fix the events page, so that upcoming event shows up.<commit_after> | from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
next_event = Event.query.filter(and_(Event.published == True, Event.start_time > datetime.now())).order_by(Event.start_time).first()
eventlist = Event.query.filter(and_(Event.published == True, Event.start_time < datetime.now())).order_by(Event.start_time.desc()).all()
if next_event:
eventlist.insert(0, next_event)
return render_template("base/events.j2", events=eventlist)
| from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
eventlist = Event.query.filter(and_(Event.published is True, Event.start_time < (datetime.now() + timedelta(seconds=1)))).order_by(Event.start_time.desc()).all()
return render_template("base/events.j2", events=eventlist)
Fix the events page, so that upcoming event shows up.from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
next_event = Event.query.filter(and_(Event.published == True, Event.start_time > datetime.now())).order_by(Event.start_time).first()
eventlist = Event.query.filter(and_(Event.published == True, Event.start_time < datetime.now())).order_by(Event.start_time.desc()).all()
if next_event:
eventlist.insert(0, next_event)
return render_template("base/events.j2", events=eventlist)
| <commit_before>from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
eventlist = Event.query.filter(and_(Event.published is True, Event.start_time < (datetime.now() + timedelta(seconds=1)))).order_by(Event.start_time.desc()).all()
return render_template("base/events.j2", events=eventlist)
<commit_msg>Fix the events page, so that upcoming event shows up.<commit_after>from datetime import datetime, timedelta
from flask import Blueprint, render_template
from sqlalchemy import and_
from models import Event
blueprint = Blueprint("base", __name__)
@blueprint.route("/")
def index():
upcoming = Event.query.filter_by(published=True).order_by(Event.start_time).first()
return render_template("base/index.j2", upcoming=upcoming)
@blueprint.route("/about")
def about():
return render_template("base/about.j2")
@blueprint.route("/events")
def events():
next_event = Event.query.filter(and_(Event.published == True, Event.start_time > datetime.now())).order_by(Event.start_time).first()
eventlist = Event.query.filter(and_(Event.published == True, Event.start_time < datetime.now())).order_by(Event.start_time.desc()).all()
if next_event:
eventlist.insert(0, next_event)
return render_template("base/events.j2", events=eventlist)
|
3f48d0fb0e44d35f29990c0d32c032ecee8fbe65 | conftest.py | conftest.py | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| Read our .env when we test. | Read our .env when we test.
| Python | apache-2.0 | hello-base/web,hello-base/web,hello-base/web,hello-base/web | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
Read our .env when we test. | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| <commit_before>import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
<commit_msg>Read our .env when we test.<commit_after> | import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
Read our .env when we test.import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
| <commit_before>import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
<commit_msg>Read our .env when we test.<commit_after>import os
from django import get_version
from django.conf import settings
def pytest_report_header(config):
return 'django: ' + get_version()
def pytest_configure():
import dotenv
dotenv.read_dotenv()
if not settings.configured:
os.environ['DJANGO_SETTINGS_MODULE'] = 'base.settings'
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
|
8a663ecc384a1b0d43f554b894571103348ad7ab | responsive_design_helper/views.py | responsive_design_helper/views.py | from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url_to_test = self.request.build_absolute_uri()[0:-len("responsive/")]
context["url_to_test"] = url_to_test
return context
| from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url = self.request.build_absolute_uri()
context["url_to_test"] = url[0:url.rindex("responsive")]
return context
| Adjust so it works properly with types | Adjust so it works properly with types
| Python | apache-2.0 | tswicegood/django-responsive-design-helper,tswicegood/django-responsive-design-helper | from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url_to_test = self.request.build_absolute_uri()[0:-len("responsive/")]
context["url_to_test"] = url_to_test
return context
Adjust so it works properly with types | from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url = self.request.build_absolute_uri()
context["url_to_test"] = url[0:url.rindex("responsive")]
return context
| <commit_before>from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url_to_test = self.request.build_absolute_uri()[0:-len("responsive/")]
context["url_to_test"] = url_to_test
return context
<commit_msg>Adjust so it works properly with types<commit_after> | from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url = self.request.build_absolute_uri()
context["url_to_test"] = url[0:url.rindex("responsive")]
return context
| from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url_to_test = self.request.build_absolute_uri()[0:-len("responsive/")]
context["url_to_test"] = url_to_test
return context
Adjust so it works properly with typesfrom django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url = self.request.build_absolute_uri()
context["url_to_test"] = url[0:url.rindex("responsive")]
return context
| <commit_before>from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url_to_test = self.request.build_absolute_uri()[0:-len("responsive/")]
context["url_to_test"] = url_to_test
return context
<commit_msg>Adjust so it works properly with types<commit_after>from django.views.generic import TemplateView
class ResponsiveTestView(TemplateView):
template_name = "responsive_design_helper/%s.html"
def get_template_names(self, **kwargs):
t = self.kwargs.get('type', 'all') or 'all'
return self.template_name % t
def get_context_data(self, **kwargs):
context = super(ResponsiveTestView, self).get_context_data(**kwargs)
url = self.request.build_absolute_uri()
context["url_to_test"] = url[0:url.rindex("responsive")]
return context
|
005ac5832a4992c2d1091505c2be10ae6ad34ef5 | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the example proxy list | Update the example proxy list
| Python | mit | mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the example proxy list | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the example proxy list<commit_after> | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the example proxy list"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the example proxy list<commit_after>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
eda35123356edd20b361aa2f1d1f20cc7b922e39 | settings_example.py | settings_example.py | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| Add CSV file name format setting example | Add CSV file name format setting example
| Python | mit | AustralianAntarcticDataCentre/save_emails_to_files,AustralianAntarcticDataCentre/save_emails_to_files | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
Add CSV file name format setting example | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| <commit_before>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
<commit_msg>Add CSV file name format setting example<commit_after> | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
Add CSV file name format setting exampleimport os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
| <commit_before>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
<commit_msg>Add CSV file name format setting example<commit_after>import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
CSV_FOLDER = os.getcwd()
CSV_NAME_FORMAT = '{year}-{month}-{day}T{hour}{minute}.csv'
# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'
# Restrict emails by subject.
EMAIL_SUBJECT_RE = re.compile(''.join([
r'(?P<year>\d{4})',
r'(?P<month>\d{2})',
r'(?P<day>\d{2})',
r'(?P<hour>\d{2})',
r'(?P<minute>\d{2})',
r'\.csv',
]))
LOGGING_FORMAT = '''
- file: %(pathname)s
level: %(levelname)s
line: %(lineno)s
message: |
%(message)s
time: %(asctime)s
'''.strip()
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
con = 'my_username/my_password@database.example.com:5432/my_database'
return DatabaseServer(con)
def get_email_client():
return EmailServer('mail.example.com', 'my_username', 'my_password')
|
020d6e2bff5975aad79833bdf28c6a791e7953d1 | instabrade/__init__.py | instabrade/__init__.py | from __future__ import absolute_import
from collections import namedtuple
import pbr.version
__version__ = pbr.version.VersionInfo('instabrade').version_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
| from __future__ import absolute_import
from collections import namedtuple
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
| Update how version is determined | Update how version is determined
| Python | mit | levi-rs/instabrade | from __future__ import absolute_import
from collections import namedtuple
import pbr.version
__version__ = pbr.version.VersionInfo('instabrade').version_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
Update how version is determined | from __future__ import absolute_import
from collections import namedtuple
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
| <commit_before>from __future__ import absolute_import
from collections import namedtuple
import pbr.version
__version__ = pbr.version.VersionInfo('instabrade').version_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
<commit_msg>Update how version is determined<commit_after> | from __future__ import absolute_import
from collections import namedtuple
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
| from __future__ import absolute_import
from collections import namedtuple
import pbr.version
__version__ = pbr.version.VersionInfo('instabrade').version_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
Update how version is determinedfrom __future__ import absolute_import
from collections import namedtuple
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
| <commit_before>from __future__ import absolute_import
from collections import namedtuple
import pbr.version
__version__ = pbr.version.VersionInfo('instabrade').version_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
<commit_msg>Update how version is determined<commit_after>from __future__ import absolute_import
from collections import namedtuple
from pbr.version import VersionInfo
__version__ = VersionInfo('instabrade').semantic_version().release_string()
PageID = namedtuple("PageID", "name css_path attr attr_value")
LOG_IN_IDENTIFIER = PageID(name='Log In Page Identifier',
css_path='h1.coreSpriteLoggedOutWordmark',
attr='class',
attr_value='coreSpriteLoggedOutWordmark')
HOME_IDENTIFIER = PageID(name='Home Page Identifier',
css_path='main[role="main"] > section a[href$="explore/"]',
attr='href',
attr_value='/explore/')
from instabrade.instagram import Instagram # noqa
|
438d78058951179f947480b0340752fa9b372a9d | sqs.py | sqs.py | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
| from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| Add init code to deal with AWS HTTP API | Add init code to deal with AWS HTTP API
| Python | mit | MA3STR0/AsyncAWS | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
Add init code to deal with AWS HTTP API | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| <commit_before>from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
<commit_msg>Add init code to deal with AWS HTTP API<commit_after> | from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
Add init code to deal with AWS HTTP APIfrom tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
| <commit_before>from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
super(SQSRequest, self).__init__(*args, **kwargs)
<commit_msg>Add init code to deal with AWS HTTP API<commit_after>from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPClient
from tornado.httputil import url_concat
import datetime
import hashlib
import hmac
class SQSRequest(HTTPRequest):
"""SQS AWS Adapter for Tornado HTTP request"""
def __init__(self, *args, **kwargs):
t = datetime.datetime.utcnow()
method = kwargs.get('method', 'GET')
url = kwargs.get('url') or args[0]
params = sorted(url.split('?')[1].split('&'))
canonical_querystring = '&'.join(params)
kwargs['url'] = url.split('?')[0] + '?' + canonical_querystring
args = tuple()
host = url.split('://')[1].split('/')[0]
canonical_uri = url.split('://')[1].split('.com')[1].split('?')[0]
service = 'sqs'
region = kwargs.get('region', 'eu-west-1')
super(SQSRequest, self).__init__(*args, **kwargs)
|
d5c65f6ac2cdae3310f41efb9ab0a6d5cae63357 | kopytka/managers.py | kopytka/managers.py | from django.db import models
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
| from django.db import models
from .transforms import SKeys
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
def fragment_keys(self):
return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
| Add fragment_keys method to PageQuerySet | Add fragment_keys method to PageQuerySet
| Python | mit | funkybob/kopytka,funkybob/kopytka,funkybob/kopytka | from django.db import models
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
Add fragment_keys method to PageQuerySet | from django.db import models
from .transforms import SKeys
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
def fragment_keys(self):
return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
| <commit_before>from django.db import models
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
<commit_msg>Add fragment_keys method to PageQuerySet<commit_after> | from django.db import models
from .transforms import SKeys
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
def fragment_keys(self):
return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
| from django.db import models
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
Add fragment_keys method to PageQuerySetfrom django.db import models
from .transforms import SKeys
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
def fragment_keys(self):
return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
| <commit_before>from django.db import models
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
<commit_msg>Add fragment_keys method to PageQuerySet<commit_after>from django.db import models
from .transforms import SKeys
class PageQuerySet(models.QuerySet):
def published(self):
return self.filter(is_published=True)
def fragment_keys(self):
return self.annotate(keys=SKeys('fragments')).values_list('keys', flat=True)
|
4bf7f15896677b1ffb5678710086e13ff0c3e094 | PyFVCOM/__init__.py | PyFVCOM/__init__.py | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
| """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import plot
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import utilities
| Fix sorting of the imports. | Fix sorting of the imports.
| Python | mit | pwcazenave/PyFVCOM | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
Fix sorting of the imports. | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import plot
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import utilities
| <commit_before>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
<commit_msg>Fix sorting of the imports.<commit_after> | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import plot
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import utilities
| """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
Fix sorting of the imports."""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import plot
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import utilities
| <commit_before>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import plot
from PyFVCOM import utilities
<commit_msg>Fix sorting of the imports.<commit_after>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.6.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ll2utm as coordinate_tools
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide_tools
from PyFVCOM import plot
from PyFVCOM import process_results
from PyFVCOM import read_results
from PyFVCOM import utilities
|
fcad1fa7187fe81d80b8861df2851402be01b667 | PyFVCOM/__init__.py | PyFVCOM/__init__.py | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
| """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave', 'Michael Bedington']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
| Add Mike as a contributor. | Add Mike as a contributor.
| Python | mit | pwcazenave/PyFVCOM | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
Add Mike as a contributor. | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave', 'Michael Bedington']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
| <commit_before>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
<commit_msg>Add Mike as a contributor.<commit_after> | """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave', 'Michael Bedington']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
| """
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
Add Mike as a contributor."""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave', 'Michael Bedington']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
| <commit_before>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
<commit_msg>Add Mike as a contributor.<commit_after>"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '2.0.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave', 'Michael Bedington']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy
from PyFVCOM import coast
from PyFVCOM import ctd
from PyFVCOM import current
from PyFVCOM import grid
from PyFVCOM import coordinate
from PyFVCOM import ocean
from PyFVCOM import stats
from PyFVCOM import tidal_ellipse
from PyFVCOM import tide
from PyFVCOM import plot
from PyFVCOM import read
from PyFVCOM import utilities
from PyFVCOM import validation
|
cf2615c2488198bd9f904a4e65ac4fc0e0d6c475 | insertion.py | insertion.py | import timeit
def insertion(_list):
    """Sort a list of ints in ascending order via insertion sort.

    The list is sorted in place and also returned for convenience.

    Args:
        _list: list of ints to sort.

    Returns:
        The same list object, sorted ascending.

    Raises:
        TypeError: if ``_list`` is not a list or contains a non-int.
    """
    if type(_list) is not list:
        raise TypeError('Entire list must be numbers')
    # Start at 0 (not 1) so the first element is also type-checked;
    # the i == 0 iteration is a no-op for the sort itself.
    for i in range(len(_list)):
        key = _list[i]
        if not isinstance(key, int):
            raise TypeError('Entire list must be numbers')
        position = i
        # Shift larger elements right until key's slot is found.
        while position > 0 and _list[position - 1] > key:
            _list[position] = _list[position - 1]
            position = position - 1
        _list[position] = key
    return _list
if __name__ == '__main__':
    # Quick smoke test of the sorter on a tiny input (Python 2 syntax).
    input = [1, 3, 2]  # NOTE(review): shadows the builtin ``input``
    output = insertion(input)
    print output
    # NOTE(review): timeit.timeit expects a callable or a code string;
    # passing the already-computed result of insertion(...) times a no-op.
    # Confirm intent -- likely timeit.timeit(lambda: insertion(input)).
    timeit.timeit(insertion(input))
| import time
def timed_func(func):
    """Decorator for timing our traversal methods.

    The wrapped callable returns a ``(result, elapsed_seconds)`` tuple
    instead of the bare result.
    """
    def timed(*args, **kwargs):
        started = time.time()
        outcome = func(*args, **kwargs)
        elapsed = time.time() - started
        return (outcome, elapsed)
    return timed
@timed_func
def insertion(_list):
    """Sort a list of ints ascending, in place, via insertion sort.

    Raises TypeError for non-list input or any non-int element
    encountered while sorting; returns the (sorted) list.
    """
    if type(_list) is not list:
        raise TypeError('Entire list must be numbers')
    for idx in range(1, len(_list)):
        current = _list[idx]
        if not isinstance(current, int):
            raise TypeError('Entire list must be numbers')
        slot = idx
        # Walk left, shifting larger values one step right.
        while slot > 0 and _list[slot - 1] > current:
            _list[slot] = _list[slot - 1]
            slot -= 1
        _list[slot] = current
    return _list
if __name__ == '__main__':
    # Benchmark on already-sorted input (insertion sort's best case, O(n)).
    # insertion() is wrapped by timed_func, so it returns (result, elapsed).
    lengths = [10, 100, 1000, 10000]
    times = []
    for x in lengths:
        output = insertion(range(x))
        times.append(output[1])
    print 'Best case scenario:'
    for length, tim in zip(lengths, times):
        print 'a list of length {} was sorted in {}'.format(length, tim)
    # Ratios of successive timings approximate the growth factor per 10x input.
    # NOTE(review): range(len(times)-2) drops the last ratio -- confirm
    # whether range(len(times)-1) was intended.
    diff = []
    for x in range(len(times)-2):
        diff.append(times[x+1]/times[x])
    average = reduce(lambda x, y: x+y, diff) / len(diff)
    print 'As length increases by 10, time increases by {}'.format(average)
    # Repeat on reverse-sorted input (worst case, O(n^2)).
    lengths = [10, 100, 1000, 10000]
    times = []
    for x in lengths:
        output = insertion(range(x)[::-1])
        times.append(output[1])
    print 'Worse case scenario:'
    for length, tim in zip(lengths, times):
        print 'a list of length {} was sorted in {}'.format(length, tim)
    diff = []
    for x in range(len(times)-2):
        diff.append(times[x+1]/times[x])
    average = reduce(lambda x, y: x+y, diff) / len(diff)
    print 'As length increases by 10, time increases by {}'.format(average)
| Add timing to show time complexity. | Add timing to show time complexity.
| Python | mit | bm5w/second_dataS | import timeit
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
input = [1, 3, 2]
output = insertion(input)
print output
timeit.timeit(insertion(input))
Add timing to show time complexity. | import time
def timed_func(func):
"""Decorator for timing our traversal methods."""
def timed(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
elapsed = time.time() - start
# print "time expired: %s" % elapsed
return (result, elapsed)
return timed
@timed_func
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x))
times.append(output[1])
print 'Best case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x)[::-1])
times.append(output[1])
print 'Worse case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
| <commit_before>import timeit
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
input = [1, 3, 2]
output = insertion(input)
print output
timeit.timeit(insertion(input))
<commit_msg>Add timing to show time complexity.<commit_after> | import time
def timed_func(func):
"""Decorator for timing our traversal methods."""
def timed(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
elapsed = time.time() - start
# print "time expired: %s" % elapsed
return (result, elapsed)
return timed
@timed_func
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x))
times.append(output[1])
print 'Best case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x)[::-1])
times.append(output[1])
print 'Worse case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
| import timeit
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
input = [1, 3, 2]
output = insertion(input)
print output
timeit.timeit(insertion(input))
Add timing to show time complexity.import time
def timed_func(func):
"""Decorator for timing our traversal methods."""
def timed(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
elapsed = time.time() - start
# print "time expired: %s" % elapsed
return (result, elapsed)
return timed
@timed_func
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x))
times.append(output[1])
print 'Best case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x)[::-1])
times.append(output[1])
print 'Worse case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
| <commit_before>import timeit
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
input = [1, 3, 2]
output = insertion(input)
print output
timeit.timeit(insertion(input))
<commit_msg>Add timing to show time complexity.<commit_after>import time
def timed_func(func):
"""Decorator for timing our traversal methods."""
def timed(*args, **kwargs):
start = time.time()
result = func(*args, **kwargs)
elapsed = time.time() - start
# print "time expired: %s" % elapsed
return (result, elapsed)
return timed
@timed_func
def insertion(_list):
'''Sorts a list via the insertion method.'''
if type(_list) is not list:
raise TypeError('Entire list must be numbers')
for i in range(1, len(_list)):
key = _list[i]
if not isinstance(key, int):
raise TypeError('Entire list must be numbers')
position = i
while position > 0 and _list[position-1] > key:
_list[position] = _list[position-1]
position = position-1
_list[position] = key
return _list
if __name__ == '__main__':
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x))
times.append(output[1])
print 'Best case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
lengths = [10, 100, 1000, 10000]
times = []
for x in lengths:
output = insertion(range(x)[::-1])
times.append(output[1])
print 'Worse case scenario:'
for length, tim in zip(lengths, times):
print 'a list of length {} was sorted in {}'.format(length, tim)
diff = []
for x in range(len(times)-2):
diff.append(times[x+1]/times[x])
average = reduce(lambda x, y: x+y, diff) / len(diff)
print 'As length increases by 10, time increases by {}'.format(average)
|
fb8db56ca83a18860ed1ae279d3f390456e224fe | cinder/brick/initiator/host_driver.py | cinder/brick/initiator/host_driver.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
    """Host-side helper for enumerating attached block devices."""

    def get_all_block_devices(self):
        """Get the list of all block devices seen in /dev/disk/by-path/.

        Returns an empty list when the directory does not exist: udev
        removes /dev/disk/by-path/ once the last mapped device goes away,
        so a blind os.listdir would raise an unhandled OSError during
        detach/cleanup.
        """
        files = []
        dir = "/dev/disk/by-path/"
        # Guard the listdir call -- the directory is absent when no
        # devices are currently mapped.
        if os.path.isdir(dir):
            files = os.listdir(dir)
        devices = []
        for file in files:
            devices.append(dir + file)
        return devices
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
    """Host-side helper for enumerating attached block devices."""

    def get_all_block_devices(self):
        """Get the list of all block devices seen in /dev/disk/by-path/."""
        by_path_dir = "/dev/disk/by-path/"
        # The directory may not exist when no devices are mapped; treat
        # that as "no devices" rather than letting listdir raise.
        entries = os.listdir(by_path_dir) if os.path.isdir(by_path_dir) else []
        return [by_path_dir + entry for entry in entries]
| Check if dir exists before calling listdir | Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was successful, the HostDriver code called os.listdir on a directory
that no longer exists and raised an unhandled exception that
caused the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
(cherry picked from commit 1766a5acc5c948288b4cd81c62d0c1507c55f727)
| Python | apache-2.0 | rickerc/cinder_audit,rickerc/cinder_audit | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
    # Thin host-side helper used by the brick initiator code.

    def get_all_block_devices(self):
        """Get the list of all block devices seen in /dev/disk/by-path/."""
        dir = "/dev/disk/by-path/"
        # NOTE(review): os.listdir raises OSError when this directory is
        # absent (udev removes it once no devices remain mapped) -- confirm
        # callers tolerate that, or guard with os.path.isdir first.
        files = os.listdir(dir)
        devices = []
        for file in files:
            devices.append(dir + file)
        return devices
Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was succesful, the HostDriver code called os.lisdir on a directory
that doesn't exist any longer and raises an unhandled exception that
cause the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
(cherry picked from commit 1766a5acc5c948288b4cd81c62d0c1507c55f727) | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
files = []
dir = "/dev/disk/by-path/"
if os.path.isdir(dir):
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
| <commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
dir = "/dev/disk/by-path/"
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
<commit_msg>Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was successful, the HostDriver code called os.listdir on a directory
that no longer exists and raised an unhandled exception that
caused the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
(cherry picked from commit 1766a5acc5c948288b4cd81c62d0c1507c55f727)<commit_after> | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
files = []
dir = "/dev/disk/by-path/"
if os.path.isdir(dir):
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
dir = "/dev/disk/by-path/"
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was succesful, the HostDriver code called os.lisdir on a directory
that doesn't exist any longer and raises an unhandled exception that
cause the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
(cherry picked from commit 1766a5acc5c948288b4cd81c62d0c1507c55f727)# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
files = []
dir = "/dev/disk/by-path/"
if os.path.isdir(dir):
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
| <commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
dir = "/dev/disk/by-path/"
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
<commit_msg>Check if dir exists before calling listdir
Changes along the way to how we clean up and detach after
copying an image to a volume exposed a problem in the cleanup
of the brick/initiator routines.
The clean up in the initiator detach was doing a blind listdir
of /dev/disk/by-path, however due to detach and cleanup being
called upon completion of the image download to the volume if
there are no other devices mapped in this directory the directory
is removed.
The result was that even though the create and copy of the image
was succesful, the HostDriver code called os.lisdir on a directory
that doesn't exist any longer and raises an unhandled exception that
cause the taskflow mechanism to mark the volume as failed.
Change-Id: I488755c1a49a77f42efbb58a7a4eb6f4f084df07
Closes-bug: #1243980
(cherry picked from commit 1766a5acc5c948288b4cd81c62d0c1507c55f727)<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
class HostDriver(object):
def get_all_block_devices(self):
"""Get the list of all block devices seen in /dev/disk/by-path/."""
files = []
dir = "/dev/disk/by-path/"
if os.path.isdir(dir):
files = os.listdir(dir)
devices = []
for file in files:
devices.append(dir + file)
return devices
|
d7157d2999a4d9a8f624c3b509726b49d9193a01 | conllu/compat.py | conllu/compat.py | try:
from io import StringIO
except ImportError:
from StringIO import StringIO
# Python 2 has no FileNotFoundError builtin; referencing the name raises
# NameError there, in which case fall back to its closest ancestor, IOError.
try:
    FileNotFoundError = FileNotFoundError
except NameError:
    FileNotFoundError = IOError
try:
    from contextlib import redirect_stdout
except ImportError:  # Python 2: minimal backport of contextlib.redirect_stdout
    import contextlib
    import sys

    @contextlib.contextmanager
    def redirect_stdout(target):
        """Temporarily point sys.stdout at *target*, restoring it on exit.

        Restoration happens in a ``finally`` block so stdout is recovered
        even when the managed body raises, matching the behavior of the
        real contextlib.redirect_stdout.
        """
        original = sys.stdout
        sys.stdout = target
        try:
            yield
        finally:
            sys.stdout = original
def string_to_file(string):
    """Wrap *string* in an in-memory file; falsy input yields an empty buffer."""
    content = text(string) if string else None
    return StringIO(content)
def capture_print(func, args=None):
    """Call *func* (with *args* when truthy) and return its captured stdout."""
    buffer = StringIO()
    with redirect_stdout(buffer):
        func(args) if args else func()
    return buffer.getvalue()
try:
    from re import fullmatch
except ImportError:  # Python 2: emulate re.fullmatch with an anchored re.match
    from re import match

    def fullmatch(regex, *args, **kwargs):
        """Backport of re.fullmatch for compiled patterns."""
        pattern = regex.pattern
        if pattern.endswith("$"):
            return match(pattern, *args, **kwargs)
        return match(pattern + "$", *args, flags=regex.flags, **kwargs)
try:  # Python 2 provides a ``unicode`` builtin; on Python 3 alias it to str.
    unicode('')
except NameError:
    unicode = str


def text(value):
    """Return *value* coerced to the platform's unicode text type."""
    return unicode(value)
| from io import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
| Remove special case from StringIO. | Remove special case from StringIO.
| Python | mit | EmilStenstrom/conllu | try:
from io import StringIO
except ImportError:
from StringIO import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
Remove special case from StringIO. | from io import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
| <commit_before>try:
from io import StringIO
except ImportError:
from StringIO import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
<commit_msg>Remove special case from StringIO.<commit_after> | from io import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
| try:
from io import StringIO
except ImportError:
from StringIO import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
Remove special case from StringIO.from io import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
| <commit_before>try:
from io import StringIO
except ImportError:
from StringIO import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
<commit_msg>Remove special case from StringIO.<commit_after>from io import StringIO
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
from contextlib import redirect_stdout
except ImportError:
import contextlib
import sys
@contextlib.contextmanager
def redirect_stdout(target):
original = sys.stdout
sys.stdout = target
yield
sys.stdout = original
def string_to_file(string):
return StringIO(text(string) if string else None)
def capture_print(func, args=None):
f = StringIO()
with redirect_stdout(f):
if args:
func(args)
else:
func()
return f.getvalue()
try:
from re import fullmatch
except ImportError:
from re import match
def fullmatch(regex, *args, **kwargs):
if not regex.pattern.endswith("$"):
return match(regex.pattern + "$", *args, flags=regex.flags, **kwargs)
return match(regex.pattern, *args, **kwargs)
try:
unicode('')
except NameError:
unicode = str
def text(value):
return unicode(value)
|
d1e1ce5612e1437b2776043f3b6276be5b1d25a6 | csv_converter.py | csv_converter.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
| Add checking empty product code | Add checking empty product code
| Python | mit | stormaaja/csvconverter,stormaaja/csvconverter,stormaaja/csvconverter | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
Add checking empty product code | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
<commit_msg>Add checking empty product code<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
Add checking empty product code#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
return {
'product_code': int(row[self.source_product_code]),
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
<commit_msg>Add checking empty product code<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
class CsvConverter:
def __init__(self, csv_file_path):
self.csv_file_path = csv_file_path
self.rows = []
self.source_product_code = "product_code"
self.source_quantity = "quantity"
def clear(self):
self.rows = []
def addRow(self, row):
self.rows.append(row)
def getRow(self, index):
return self.rows[index]
def setSourceColumns(self, source_product_code, source_quantity):
self.source_product_code = source_product_code
self.source_quantity = source_quantity
def setTargetColumns(self, target_product_code, target_quantity):
self.target_product_code = target_product_code
self.target_quantity = target_quantity
def convertRow(self, row):
if not row[self.source_product_code]:
raise ValueError
return {
'product_code': row[self.source_product_code],
'quantity': int(row[self.source_quantity])
}
def read_file(self):
with open(self.csv_file_path, 'rb') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
self.addRow(self.convertRow(row))
|
d78188713ffd3e36514ba0db5f74bae111e6a7dc | calc.py | calc.py | """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
| """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
| Add usage string for fallthrough cases | Add usage string for fallthrough cases
| Python | bsd-3-clause | mkuiper/calc-1 | """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
Add usage string for fallthrough cases | """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
| <commit_before>"""calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
<commit_msg>Add usage string for fallthrough cases<commit_after> | """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
| """calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
Add usage string for fallthrough cases"""calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
| <commit_before>"""calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
<commit_msg>Add usage string for fallthrough cases<commit_after>"""calc.py: A simple calculator."""
import sys
def add_all(nums):
return sum(nums)
def multiply_all(nums):
return reduce(lambda a, b: a * b, nums)
if __name__ == '__main__':
command = sys.argv[1]
nums = map(float, sys.argv[2:])
if command == 'add':
print(add_all(nums))
elif command == 'multiply':
print(multiply_all(nums))
else:
usage = "calc.py [add|multiply] NUM1 [NUM2 [NUM3 [...]]]"
print(usage)
|
b43504e09881a92525ae18ef76591f7c2ebe5f8c | newsman/watchdog/clean_process.py | newsman/watchdog/clean_process.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
| #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -15"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
| Change process killing from -9 to -15 | Change process killing from -9 to -15
| Python | agpl-3.0 | chengdujin/newsman,chengdujin/newsman,chengdujin/newsman | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
Change process killing from -9 to -15 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -15"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
| <commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
<commit_msg>Change process killing from -9 to -15<commit_after> | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -15"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
| #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
Change process killing from -9 to -15#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -15"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
| <commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
<commit_msg>Change process killing from -9 to -15<commit_after>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -15"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
3ac6f578397235e8eda686fe3589cda780af53d5 | ginga/qtw/Plot.py | ginga/qtw/Plot.py | #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit in ('qt', 'qt4'):
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
elif toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
| #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
else:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
| Fix for import error with matplotlib Qt4Agg backend | Fix for import error with matplotlib Qt4Agg backend
| Python | bsd-3-clause | stscieisenhamer/ginga,ejeschke/ginga,sosey/ginga,Cadair/ginga,rupak0577/ginga,eteq/ginga,rajul/ginga,ejeschke/ginga,pllim/ginga,ejeschke/ginga,sosey/ginga,naojsoft/ginga,naojsoft/ginga,Cadair/ginga,rupak0577/ginga,rajul/ginga,eteq/ginga,stscieisenhamer/ginga,rupak0577/ginga,pllim/ginga,sosey/ginga,stscieisenhamer/ginga,pllim/ginga,Cadair/ginga,rajul/ginga,naojsoft/ginga,eteq/ginga | #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit in ('qt', 'qt4'):
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
elif toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
Fix for import error with matplotlib Qt4Agg backend | #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
else:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
| <commit_before>#
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit in ('qt', 'qt4'):
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
elif toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
<commit_msg>Fix for import error with matplotlib Qt4Agg backend<commit_after> | #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
else:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
| #
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit in ('qt', 'qt4'):
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
elif toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
Fix for import error with matplotlib Qt4Agg backend#
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
else:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
| <commit_before>#
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit in ('qt', 'qt4'):
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
elif toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
<commit_msg>Fix for import error with matplotlib Qt4Agg backend<commit_after>#
# Plot.py -- Plotting function for Ginga FITS viewer.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
# GUI imports
from ginga.qtw.QtHelp import QtGui, QtCore
from ginga.qtw import QtHelp
from ginga.toolkit import toolkit
import matplotlib
if toolkit == 'qt5':
# qt5 backend is not yet released in matplotlib stable
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg \
as FigureCanvas
else:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg \
as FigureCanvas
from ginga.base.PlotBase import PlotBase, HistogramMixin, CutsMixin
class Plot(PlotBase):
def __init__(self, logger, width=300, height=300, dpi=100):
PlotBase.__init__(self, logger, FigureCanvas,
width=width, height=height, dpi=dpi)
class Histogram(Plot, HistogramMixin):
pass
class Cuts(Plot, CutsMixin):
pass
#END
|
8ccbddffc2c41cbe623439c76cfde7097f5fa801 | nighttrain/utils.py | nighttrain/utils.py | # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list):
if isinstance(string_or_list, str):
return [string_or_list]
else:
return string_or_list
| # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list_or_none):
if isinstance(string_or_list_or_none, str):
return [string_or_list_or_none]
else:
return string_or_list_or_none or []
| Fix crash when there are no includes for a task | Fix crash when there are no includes for a task
| Python | apache-2.0 | ssssam/nightbus,ssssam/nightbus | # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list):
if isinstance(string_or_list, str):
return [string_or_list]
else:
return string_or_list
Fix crash when there are no includes for a task | # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list_or_none):
if isinstance(string_or_list_or_none, str):
return [string_or_list_or_none]
else:
return string_or_list_or_none or []
| <commit_before># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list):
if isinstance(string_or_list, str):
return [string_or_list]
else:
return string_or_list
<commit_msg>Fix crash when there are no includes for a task<commit_after> | # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list_or_none):
if isinstance(string_or_list_or_none, str):
return [string_or_list_or_none]
else:
return string_or_list_or_none or []
| # Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list):
if isinstance(string_or_list, str):
return [string_or_list]
else:
return string_or_list
Fix crash when there are no includes for a task# Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list_or_none):
if isinstance(string_or_list_or_none, str):
return [string_or_list_or_none]
else:
return string_or_list_or_none or []
| <commit_before># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list):
if isinstance(string_or_list, str):
return [string_or_list]
else:
return string_or_list
<commit_msg>Fix crash when there are no includes for a task<commit_after># Copyright 2017 Codethink Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Utility functions.'''
def ensure_list(string_or_list_or_none):
if isinstance(string_or_list_or_none, str):
return [string_or_list_or_none]
else:
return string_or_list_or_none or []
|
f0b27af3cc09808146442c94df7c76127776acf8 | gslib/devshell_auth_plugin.py | gslib/devshell_auth_plugin.py | # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
| # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
| Fix provider check causing Devshell auth failure | Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage.
| Python | apache-2.0 | GoogleCloudPlatform/gsutil,GoogleCloudPlatform/gsutil,fishjord/gsutil,BrandonY/gsutil | # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage. | # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
<commit_msg>Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage.<commit_after> | # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
| # -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage.# -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
| <commit_before># -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
if provider != 'gs':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
<commit_msg>Fix provider check causing Devshell auth failure
This commit builds on commit 13c4926, allowing Devshell credentials to
be used only with Google storage.<commit_after># -*- coding: utf-8 -*-
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Developer Shell auth bridge.
This enables Boto API auth in Developer Shell environment.
"""
from __future__ import absolute_import
from boto.auth_handler import AuthHandler
from boto.auth_handler import NotReadyToAuthenticate
import oauth2client.contrib.devshell as devshell
class DevshellAuth(AuthHandler):
"""Developer Shell authorization plugin class."""
capability = ['s3']
def __init__(self, path, config, provider):
# Provider here is a boto.provider.Provider object (as opposed to the
# provider attribute of CloudApi objects, which is a string).
if provider.name != 'google':
# Devshell credentials are valid for Google only and can't be used for s3.
raise NotReadyToAuthenticate()
try:
self.creds = devshell.DevshellCredentials()
except:
raise NotReadyToAuthenticate()
def add_auth(self, http_request):
http_request.headers['Authorization'] = ('Bearer %s' %
self.creds.access_token)
|
519a5afc8c8561166f4d8fb0ca43f0ff35a0389b | addons/hr_payroll_account/__manifest__.py | addons/hr_payroll_account/__manifest__.py | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account', 'hr_expense'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
| #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
| Remove useless dependency to hr_expense | [IMP] hr_payroll_account: Remove useless dependency to hr_expense
| Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account', 'hr_expense'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
[IMP] hr_payroll_account: Remove useless dependency to hr_expense | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
| <commit_before>#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account', 'hr_expense'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
<commit_msg>[IMP] hr_payroll_account: Remove useless dependency to hr_expense<commit_after> | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
| #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account', 'hr_expense'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
[IMP] hr_payroll_account: Remove useless dependency to hr_expense#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
| <commit_before>#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account', 'hr_expense'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
<commit_msg>[IMP] hr_payroll_account: Remove useless dependency to hr_expense<commit_after>#-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Payroll Accounting',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'depends': ['hr_payroll', 'account'],
'data': ['views/hr_payroll_account_views.xml'],
'demo': ['data/hr_payroll_account_demo.xml'],
'test': ['../account/test/account_minimal_test.xml'],
}
|
84f111f6b5029fc86645311866310b5de48a39e3 | mqo_program/__openerp__.py | mqo_program/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'mqo_website'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} | Add required dependency to mqo_programs. | [IMP] Add required dependency to mqo_programs. | Python | agpl-3.0 | drummingbird/mqo,drummingbird/mqo | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
}[IMP] Add required dependency to mqo_programs. | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'mqo_website'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} | <commit_before># -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
}<commit_msg>[IMP] Add required dependency to mqo_programs.<commit_after> | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'mqo_website'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} | # -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
}[IMP] Add required dependency to mqo_programs.# -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'mqo_website'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} | <commit_before># -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
}<commit_msg>[IMP] Add required dependency to mqo_programs.<commit_after># -*- coding: utf-8 -*-
{
'name': "MQO Programs",
'summary': """Manage programs""",
'description': """
MQO module for managing programs:
""",
'author': "Your Company",
'website': "http://www.yourcompany.com",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Test',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base', 'mqo_website'],
# always loaded
'data': [
'security/security.xml',
'security/ir.model.access.csv',
'views/program.xml',
'views/benefit.xml',
'templates/program.xml',
],
# only loaded in demonstration mode
'demo': [
# 'demo.xml',
],
} |
671a932682f37912b11413f989ad52cf6b046ed6 | basex-api/src/main/python/QueryExample.py | basex-api/src/main/python/QueryExample.py | # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
# loop through all results
while query.more():
print query.next()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
| # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
print query.execute()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
| Fix a bug on a query example for python | Fix a bug on a query example for python
Methods used by the former example, `query.more()` and `query.next()`, do not exist any longer.
I've modified them to `query.execute()`, according to `BaseXClient.py`, to make it run as good as it should be. | Python | bsd-3-clause | ksclarke/basex,deshmnnit04/basex,dimitarp/basex,joansmith/basex,joansmith/basex,dimitarp/basex,ksclarke/basex,drmacro/basex,dimitarp/basex,dimitarp/basex,BaseXdb/basex,joansmith/basex,vincentml/basex,drmacro/basex,joansmith/basex,BaseXdb/basex,BaseXdb/basex,JensErat/basex,joansmith/basex,JensErat/basex,ksclarke/basex,JensErat/basex,dimitarp/basex,vincentml/basex,dimitarp/basex,deshmnnit04/basex,vincentml/basex,dimitarp/basex,BaseXdb/basex,vincentml/basex,JensErat/basex,drmacro/basex,drmacro/basex,deshmnnit04/basex,BaseXdb/basex,deshmnnit04/basex,ksclarke/basex,deshmnnit04/basex,deshmnnit04/basex,BaseXdb/basex,BaseXdb/basex,JensErat/basex,JensErat/basex,joansmith/basex,ksclarke/basex,dimitarp/basex,joansmith/basex,BaseXdb/basex,drmacro/basex,drmacro/basex,vincentml/basex,JensErat/basex,JensErat/basex,joansmith/basex,vincentml/basex,dimitarp/basex,deshmnnit04/basex,deshmnnit04/basex,vincentml/basex,joansmith/basex,drmacro/basex,ksclarke/basex,drmacro/basex,ksclarke/basex,BaseXdb/basex,drmacro/basex,dimitarp/basex,joansmith/basex,joansmith/basex,JensErat/basex,BaseXdb/basex,ksclarke/basex,ksclarke/basex,vincentml/basex,drmacro/basex,JensErat/basex,vincentml/basex,ksclarke/basex,BaseXdb/basex,JensErat/basex,vincentml/basex,ksclarke/basex,vincentml/basex,vincentml/basex,dimitarp/basex,drmacro/basex,drmacro/basex,JensErat/basex,dimitarp/basex,deshmnnit04/basex,joansmith/basex,deshmnnit04/basex,deshmnnit04/basex,deshmnnit04/basex,ksclarke/basex,BaseXdb/basex | # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
# loop through all results
while query.more():
print query.next()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
Fix a bug on a query example for python
Methods used by the former example, `query.more()` and `query.next()`, do not exist any longer.
I've modified them to `query.execute()`, according to `BaseXClient.py`, to make it run as good as it should be. | # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
print query.execute()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
| <commit_before># This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
# loop through all results
while query.more():
print query.next()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
<commit_msg>Fix a bug on a query example for python
Methods used by the former example, `query.more()` and `query.next()`, do not exist any longer.
I've modified them to `query.execute()`, according to `BaseXClient.py`, to make it run as good as it should be.<commit_after> | # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
print query.execute()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
| # This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
# loop through all results
while query.more():
print query.next()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
Fix a bug on a query example for python
Methods used by the former example, `query.more()` and `query.next()`, do not exist any longer.
I've modified them to `query.execute()`, according to `BaseXClient.py`, to make it run as good as it should be.# This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
print query.execute()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
| <commit_before># This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
# loop through all results
while query.more():
print query.next()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
<commit_msg>Fix a bug on a query example for python
Methods used by the former example, `query.more()` and `query.next()`, do not exist any longer.
I've modified them to `query.execute()`, according to `BaseXClient.py`, to make it run as good as it should be.<commit_after># This example shows how queries can be executed in an iterative manner.
# Iterative evaluation will be slower, as more server requests are performed.
#
# Documentation: http://docs.basex.org/wiki/Clients
#
# (C) BaseX Team 2005-12, BSD License
import BaseXClient, time
try:
# create session
session = BaseXClient.Session('localhost', 1984, 'admin', 'admin')
try:
# create query instance
input = "for $i in 1 to 10 return <xml>Text { $i }</xml>"
query = session.query(input)
print query.execute()
# close query object
query.close()
except IOError as e:
# print exception
print e
# close session
session.close()
except IOError as e:
# print exception
print e
|
c6cf2fbe34f536f4c2f25e7359c6cdf1d05a55cb | image_analysis.py | image_analysis.py | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
return result | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
result['X'] -= 1
result['Y'] -= 1
return result | Fix extracted star coordinates from 1- to 0-based indexing | Fix extracted star coordinates from 1- to 0-based indexing
Note that center of first pixel is 0, ie. edge of first pixel is -0.5
| Python | mit | lkangas/python-tycho2 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
return resultFix extracted star coordinates from 1- to 0-based indexing
Note that center of first pixel is 0, ie. edge of first pixel is -0.5 | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
result['X'] -= 1
result['Y'] -= 1
return result | <commit_before># -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
return result<commit_msg>Fix extracted star coordinates from 1- to 0-based indexing
Note that center of first pixel is 0, ie. edge of first pixel is -0.5<commit_after> | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
result['X'] -= 1
result['Y'] -= 1
return result | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
return resultFix extracted star coordinates from 1- to 0-based indexing
Note that center of first pixel is 0, ie. edge of first pixel is -0.5# -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
result['X'] -= 1
result['Y'] -= 1
return result | <commit_before># -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
return result<commit_msg>Fix extracted star coordinates from 1- to 0-based indexing
Note that center of first pixel is 0, ie. edge of first pixel is -0.5<commit_after># -*- coding: utf-8 -*-
"""
Created on Mon Dec 25 15:19:55 2017
@author: vostok
"""
import os
import tempfile
from astropy.io import fits
def extract_stars(input_array):
(infilehandle, infilepath) = tempfile.mkstemp(suffix='.fits')
os.close(infilehandle)
fits.writeto(infilepath, \
input_array.astype('float32'), \
fits.Header(), \
overwrite=True)
return_code = os.system('image2xy -O {}'.format(infilepath))
if return_code != 0:
raise "image2xy returned with error code %d" % return_code
result = fits.open(infilepath.replace('.fits', '.xy.fits'))[1].data
os.unlink(infilepath)
result['X'] -= 1
result['Y'] -= 1
return result |
5b4684b3a5b2c37c23fb83bc14ceda6cf7c01412 | ironic/tests/unit/__init__.py | ironic/tests/unit/__init__.py | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
| # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
| Stop adding translation function to builtins | Stop adding translation function to builtins
In unittests __init__ translation function is still being added to
builtins, this is not required anymore as it is not being installed.
Change-Id: I19da395b72622a6db348f5a6dd569c7747eaa40d
| Python | apache-2.0 | SauloAislan/ironic,NaohiroTamura/ironic,openstack/ironic,pshchelo/ironic,hpproliant/ironic,devananda/ironic,ionutbalutoiu/ironic,dims/ironic,bacaldwell/ironic,ionutbalutoiu/ironic,bacaldwell/ironic,NaohiroTamura/ironic,openstack/ironic,pshchelo/ironic,dims/ironic,SauloAislan/ironic | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
Stop adding translation function to builtins
In unittests __init__ translation function is still being added to
builtins, this is not required anymore as it is not being installed.
Change-Id: I19da395b72622a6db348f5a6dd569c7747eaa40d | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
| <commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
<commit_msg>Stop adding translation function to builtins
In unittests __init__ translation function is still being added to
builtins, this is not required anymore as it is not being installed.
Change-Id: I19da395b72622a6db348f5a6dd569c7747eaa40d<commit_after> | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
| # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
Stop adding translation function to builtins
In unittests __init__ translation function is still being added to
builtins, this is not required anymore as it is not being installed.
Change-Id: I19da395b72622a6db348f5a6dd569c7747eaa40d# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
| <commit_before># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import six.moves.builtins as __builtin__
setattr(__builtin__, '_', lambda x: x)
<commit_msg>Stop adding translation function to builtins
In unittests __init__ translation function is still being added to
builtins, this is not required anymore as it is not being installed.
Change-Id: I19da395b72622a6db348f5a6dd569c7747eaa40d<commit_after># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`ironic.tests.unit` -- ironic unit tests
=====================================================
.. automodule:: ironic.tests.unit
:platform: Unix
"""
# TODO(deva): move eventlet imports to ironic.__init__ once we move to PBR
import eventlet
eventlet.monkey_patch(os=False)
|
35c66f3ade85b6b7b4e19c95b0d6a09e53b12bee | src/pip/_internal/models/index.py | src/pip/_internal/models/index.py | from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
| from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def _url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
| Fix a mistake made while merging | Fix a mistake made while merging
| Python | mit | xavfernandez/pip,rouge8/pip,pfmoore/pip,techtonik/pip,rouge8/pip,pradyunsg/pip,pypa/pip,rouge8/pip,xavfernandez/pip,xavfernandez/pip,sbidoul/pip,techtonik/pip,pypa/pip,pradyunsg/pip,pfmoore/pip,sbidoul/pip,techtonik/pip | from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
Fix a mistake made while merging | from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def _url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
| <commit_before>from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
<commit_msg>Fix a mistake made while merging<commit_after> | from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def _url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
| from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
Fix a mistake made while mergingfrom pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def _url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
| <commit_before>from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
<commit_msg>Fix a mistake made while merging<commit_after>from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
def __init__(self, url, file_storage_domain):
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
# packages which depend on external urls only necessary until PyPI can
# block such packages themselves
self.file_storage_domain = file_storage_domain
def _url_to_path(self, path):
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex('https://pypi.org/', 'files.pythonhosted.org')
TestPyPI = PackageIndex('https://test.pypi.org/', 'test-files.pythonhosted.org')
|
73660f4f539a1aeb520c33112cfc41183e4dd43a | luigi/tasks/rfam/clans_csv.py | luigi/tasks/rfam/clans_csv.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
| Use MysqlQueryTask for getting clan data | Use MysqlQueryTask for getting clan data
| Python | apache-2.0 | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
Use MysqlQueryTask for getting clan data | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
| <commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
<commit_msg>Use MysqlQueryTask for getting clan data<commit_after> | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
Use MysqlQueryTask for getting clan data# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
| <commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam.clans import parse
from tasks.config import rfam
from tasks.utils.fetch import FetchTask
from tasks.utils.writers import CsvOutput
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return FetchTask(
remote_path=conf.query('clans.sql'),
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(parse(raw))
<commit_msg>Use MysqlQueryTask for getting clan data<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import operator as op
import luigi
from databases.rfam import clans
from tasks.config import rfam
from tasks.utils.writers import CsvOutput
from tasks.utils.mysql import MysqlQueryTask
class RfamClansCSV(luigi.Task):
def requires(self):
conf = rfam()
return MysqlQueryTask(
db=conf,
query=clans.QUERY,
local_path=conf.raw('clans.tsv'),
)
def output(self):
conf = rfam()
return CsvOutput(
conf.clans,
['id', 'name', 'description', 'family_count'],
op.methodcaller('writeable'),
)
def run(self):
with self.requires().output.open('r') as raw:
self.output().populate(clans.parse(raw))
|
d0ccfd4558b9dcf1610140c9df95cec284f0fbe3 | correos_project/correos/managers.py | correos_project/correos/managers.py | from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
| from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
| Use email username if no realname is found in header | Use email username if no realname is found in header
| Python | bsd-3-clause | transcode-de/correos,transcode-de/correos,transcode-de/correos | from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
Use email username if no realname is found in header | from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
| <commit_before>from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
<commit_msg>Use email username if no realname is found in header<commit_after> | from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
| from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
Use email username if no realname is found in headerfrom email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
| <commit_before>from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
<commit_msg>Use email username if no realname is found in header<commit_after>from email import message_from_string, utils
import json
from django.db import models
from dateutil.parser import parse
class EmailManager(models.Manager):
def create_from_message(self, mailfrom, rcpttos, data):
from .models import Recipient
message = message_from_string(data)
realnames = {}
for rcptto in message['To'].split(','):
realname, email = utils.parseaddr(rcptto)
if len(realname) == 0:
realname = email.split('@')[0]
realnames[email] = realname
emails = []
for rcptto in rcpttos:
recipient, created = Recipient.objects.get_or_create(email=rcptto,
defaults={'realname': realnames[rcptto]})
email = self.model(sender=mailfrom, recipient=recipient)
email.date = message.get('Date')
if email.date is not None:
email.date = parse(email.date)
email.message_id = message['Message-ID']
email.subject = message['Subject']
email.header = json.dumps(dict(message.items()))
email.body = message.get_payload()
email.save()
emails.append(email)
return emails
|
b8b18160e4dad9d87bfdf4207b3cf4841af0140d | examples/dot/dot.py | examples/dot/dot.py | """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
| """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
# Validate arguments
if address and not (host or port):
print 'Error: Invalid address', repr(address)
return
# Default values
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
| Add validation to example script. | Add validation to example script.
| Python | mit | joeyespo/path-and-address | """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
Add validation to example script. | """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
# Validate arguments
if address and not (host or port):
print 'Error: Invalid address', repr(address)
return
# Default values
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
| <commit_before>"""\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
<commit_msg>Add validation to example script.<commit_after> | """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
# Validate arguments
if address and not (host or port):
print 'Error: Invalid address', repr(address)
return
# Default values
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
| """\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
Add validation to example script."""\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
# Validate arguments
if address and not (host or port):
print 'Error: Invalid address', repr(address)
return
# Default values
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
| <commit_before>"""\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
<commit_msg>Add validation to example script.<commit_after>"""\
Usage:
dot.py [options] [<path>] [<address>]
dot.py -h | --help
dot.py --version
Where:
<path> is the file to serve
<address> is what to listen on, of the form <host>[:<port>], or just <port>
"""
import sys
from docopt import docopt
from path_and_address import resolve, split_address
def main(args=None):
"""The entry point of the application."""
if args is None:
args = sys.argv[1:]
# Parse command-line
args = docopt(__doc__, argv=args)
# Parse arguments
path, address = resolve(args['<path>'], args['<address>'])
host, port = split_address(address)
# Validate arguments
if address and not (host or port):
print 'Error: Invalid address', repr(address)
return
# Default values
if path is None:
path = '.'
if host is None:
host = 'localhost'
if port is None:
port = 5000
# Run server
print ' * Serving %s on http://%s:%s/' % (path, host, port)
if __name__ == '__main__':
main()
|
446a760261ce4f8e8e210b2a29324c749f2bfdfb | inspector/urls.py | inspector/urls.py | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
| from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
| Remove admin autodiscovery since Django does that for us now | Remove admin autodiscovery since Django does that for us now
| Python | bsd-2-clause | refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
Remove admin autodiscovery since Django does that for us now | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
| <commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
<commit_msg>Remove admin autodiscovery since Django does that for us now<commit_after> | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
| from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
Remove admin autodiscovery since Django does that for us nowfrom django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
| <commit_before>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
admin.autodiscover()
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
<commit_msg>Remove admin autodiscovery since Django does that for us now<commit_after>from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from cbv.views import HomeView, Sitemap
urlpatterns = [
url(r'^$', HomeView.as_view(), name='home'),
url(r'^projects/', include('cbv.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^sitemap\.xml$', Sitemap.as_view(), name='sitemap'),
url(r'^', include('cbv.shortcut_urls'), {'package': 'Django'}),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
urlpatterns += [
url(r'^404/$', TemplateView.as_view(template_name='404.html')),
url(r'^500/$', TemplateView.as_view(template_name='500.html')),
]
|
66091bae24425c633d60dabfa1d1ee85869b20cb | platformio/debug/config/native.py | platformio/debug/config/native.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
set startup-with-shell off
"""
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)
| Disable GDB "startup-with-shell" only on Unix platform | Disable GDB "startup-with-shell" only on Unix platform
| Python | apache-2.0 | platformio/platformio-core,platformio/platformio-core,platformio/platformio | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
set startup-with-shell off
"""
Disable GDB "startup-with-shell" only on Unix platform | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)
| <commit_before># Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
set startup-with-shell off
"""
<commit_msg>Disable GDB "startup-with-shell" only on Unix platform<commit_after> | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
set startup-with-shell off
"""
Disable GDB "startup-with-shell" only on Unix platform# Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)
| <commit_before># Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
set startup-with-shell off
"""
<commit_msg>Disable GDB "startup-with-shell" only on Unix platform<commit_after># Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from platformio.compat import IS_WINDOWS
from platformio.debug.config.base import DebugConfigBase
class NativeDebugConfig(DebugConfigBase):
GDB_INIT_SCRIPT = """
define pio_reset_halt_target
end
define pio_reset_run_target
end
define pio_restart_target
end
$INIT_BREAK
""" + (
"set startup-with-shell off" if not IS_WINDOWS else ""
)
|
e80817032456fe4fb6ea4735abc0ca0b5bc18ddd | facenet/__init__.py | facenet/__init__.py | # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
print([float(x) for x in self.p.stdout.readline().strip().split(',')])
| # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
return [float(x) for x in self.p.stdout.readline().strip().split(',')]
| Return the vector rather than printing it. | Python: Return the vector rather than printing it.
| Python | apache-2.0 | francisleunggie/openface,sahilshah/openface,cmusatyalab/openface,sahilshah/openface,cmusatyalab/openface,nmabhi/Webface,Alexx-G/openface,nhzandi/openface,nmabhi/Webface,xinfang/face-recognize,nhzandi/openface,nmabhi/Webface,francisleunggie/openface,xinfang/face-recognize,Alexx-G/openface,xinfang/face-recognize,Alexx-G/openface,sumsuddinshojib/openface,nmabhi/Webface,sahilshah/openface,sumsuddinshojib/openface,sumsuddinshojib/openface,cmusatyalab/openface,sahilshah/openface,Alexx-G/openface,nhzandi/openface,francisleunggie/openface,sumsuddinshojib/openface | # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
print([float(x) for x in self.p.stdout.readline().strip().split(',')])
Python: Return the vector rather than printing it. | # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
return [float(x) for x in self.p.stdout.readline().strip().split(',')]
| <commit_before># Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
print([float(x) for x in self.p.stdout.readline().strip().split(',')])
<commit_msg>Python: Return the vector rather than printing it.<commit_after> | # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
return [float(x) for x in self.p.stdout.readline().strip().split(',')]
| # Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
print([float(x) for x in self.p.stdout.readline().strip().split(',')])
Python: Return the vector rather than printing it.# Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
return [float(x) for x in self.p.stdout.readline().strip().split(',')]
| <commit_before># Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
print([float(x) for x in self.p.stdout.readline().strip().split(',')])
<commit_msg>Python: Return the vector rather than printing it.<commit_after># Copyright 2015 Carnegie Mellon University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen, PIPE
import os.path
myDir = os.path.dirname(os.path.realpath(__file__))
class TorchWrap:
def __init__(self, model='models/facenet/nn4.v1.t7', imgDim=96, cuda=False):
cmd = ['/usr/bin/env', 'th', os.path.join(myDir,'facenet_server.lua'),
'-model', model, '-imgDim', str(imgDim)]
if cuda:
cmd.append('-cuda')
self.p = Popen(cmd, stdin=PIPE, stdout=PIPE, bufsize=0)
def forward(self, imgPath, timeout=10):
self.p.stdin.write(imgPath+"\n")
return [float(x) for x in self.p.stdout.readline().strip().split(',')]
|
1feed219746a2963bddc6080a5d8e9e467e50fa7 | py/g1/networks/servers/g1/networks/servers/__init__.py | py/g1/networks/servers/g1/networks/servers/__init__.py | __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
| __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
if queue.is_full():
LOG.warning(
'handler task queue is full; '
'we cannot accept any new connections'
)
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
| Add warning when server handler task queue is full | Add warning when server handler task queue is full
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
Add warning when server handler task queue is full | __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
if queue.is_full():
LOG.warning(
'handler task queue is full; '
'we cannot accept any new connections'
)
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
| <commit_before>__all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
<commit_msg>Add warning when server handler task queue is full<commit_after> | __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
if queue.is_full():
LOG.warning(
'handler task queue is full; '
'we cannot accept any new connections'
)
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
| __all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
Add warning when server handler task queue is full__all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
if queue.is_full():
LOG.warning(
'handler task queue is full; '
'we cannot accept any new connections'
)
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
| <commit_before>__all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
<commit_msg>Add warning when server handler task queue is full<commit_after>__all__ = [
'SocketServer',
]
import errno
import logging
from g1.asyncs.bases import servers
from g1.asyncs.bases import tasks
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
class SocketServer:
def __init__(self, socket, handler, max_connections=0):
self._socket = socket
self._handler = handler
self._max_connections = max_connections
async def serve(self):
LOG.info('start server: %r', self._socket)
with self._socket:
if self._max_connections <= 0:
capacity = self._max_connections
else:
# +1 for the `_accept` task.
capacity = self._max_connections + 1
async with tasks.CompletionQueue(capacity) as queue:
await servers.supervise_server(
queue,
(queue.spawn(self._accept(queue)), ),
)
LOG.info('stop server: %r', self._socket)
async def _accept(self, queue):
while True:
if queue.is_full():
LOG.warning(
'handler task queue is full; '
'we cannot accept any new connections'
)
await queue.puttable()
try:
sock, addr = await self._socket.accept()
except OSError as exc:
if exc.errno == errno.EBADF:
LOG.info('server socket close: %r', self._socket)
break
raise
LOG.debug('serve client: %r', addr)
queue.spawn(self._handler(sock, addr))
def shutdown(self):
self._socket.close()
|
6cf5d7db54ee272fa9af66d45a504d5994693ae4 | tests/functional/test_configuration.py | tests/functional/test_configuration.py | """Tests for the config command
"""
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
def test_reads_user_file(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "--list")
assert "test.hello = 1" in result.stdout
| """Tests for the config command
"""
import pytest
import textwrap
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
@pytest.mark.skip("Can't modify underlying file for any mode")
def test_reads_file_appropriately(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "list")
assert "test.hello=1" in result.stdout
def test_modification_pipeline(self, script):
script.pip(
"config", "get", "test.blah", expect_error=True
)
script.pip("config", "set", "test.blah", "1")
result4 = script.pip("config", "get", "test.blah")
assert result4.stdout.strip() == "1"
def test_listing_is_correct(self, script):
script.pip("config", "set", "test.listing-beta", "2")
script.pip("config", "set", "test.listing-alpha", "1")
script.pip("config", "set", "test.listing-gamma", "3")
result = script.pip("config", "list")
lines = list(filter(
lambda x: x.startswith("test.listing-"),
result.stdout.splitlines()
))
expected = """
test.listing-alpha='1'
test.listing-beta='2'
test.listing-gamma='3'
"""
assert lines == textwrap.dedent(expected).strip().splitlines()
| Add basic tests for configuration | Add basic tests for configuration
| Python | mit | zvezdan/pip,pypa/pip,xavfernandez/pip,xavfernandez/pip,techtonik/pip,pradyunsg/pip,RonnyPfannschmidt/pip,pradyunsg/pip,zvezdan/pip,RonnyPfannschmidt/pip,RonnyPfannschmidt/pip,rouge8/pip,sbidoul/pip,xavfernandez/pip,rouge8/pip,pfmoore/pip,zvezdan/pip,techtonik/pip,pypa/pip,sbidoul/pip,pfmoore/pip,rouge8/pip,techtonik/pip | """Tests for the config command
"""
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
def test_reads_user_file(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "--list")
assert "test.hello = 1" in result.stdout
Add basic tests for configuration | """Tests for the config command
"""
import pytest
import textwrap
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
@pytest.mark.skip("Can't modify underlying file for any mode")
def test_reads_file_appropriately(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "list")
assert "test.hello=1" in result.stdout
def test_modification_pipeline(self, script):
script.pip(
"config", "get", "test.blah", expect_error=True
)
script.pip("config", "set", "test.blah", "1")
result4 = script.pip("config", "get", "test.blah")
assert result4.stdout.strip() == "1"
def test_listing_is_correct(self, script):
script.pip("config", "set", "test.listing-beta", "2")
script.pip("config", "set", "test.listing-alpha", "1")
script.pip("config", "set", "test.listing-gamma", "3")
result = script.pip("config", "list")
lines = list(filter(
lambda x: x.startswith("test.listing-"),
result.stdout.splitlines()
))
expected = """
test.listing-alpha='1'
test.listing-beta='2'
test.listing-gamma='3'
"""
assert lines == textwrap.dedent(expected).strip().splitlines()
| <commit_before>"""Tests for the config command
"""
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
def test_reads_user_file(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "--list")
assert "test.hello = 1" in result.stdout
<commit_msg>Add basic tests for configuration<commit_after> | """Tests for the config command
"""
import pytest
import textwrap
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
@pytest.mark.skip("Can't modify underlying file for any mode")
def test_reads_file_appropriately(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "list")
assert "test.hello=1" in result.stdout
def test_modification_pipeline(self, script):
script.pip(
"config", "get", "test.blah", expect_error=True
)
script.pip("config", "set", "test.blah", "1")
result4 = script.pip("config", "get", "test.blah")
assert result4.stdout.strip() == "1"
def test_listing_is_correct(self, script):
script.pip("config", "set", "test.listing-beta", "2")
script.pip("config", "set", "test.listing-alpha", "1")
script.pip("config", "set", "test.listing-gamma", "3")
result = script.pip("config", "list")
lines = list(filter(
lambda x: x.startswith("test.listing-"),
result.stdout.splitlines()
))
expected = """
test.listing-alpha='1'
test.listing-beta='2'
test.listing-gamma='3'
"""
assert lines == textwrap.dedent(expected).strip().splitlines()
| """Tests for the config command
"""
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
def test_reads_user_file(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "--list")
assert "test.hello = 1" in result.stdout
Add basic tests for configuration"""Tests for the config command
"""
import pytest
import textwrap
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
@pytest.mark.skip("Can't modify underlying file for any mode")
def test_reads_file_appropriately(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "list")
assert "test.hello=1" in result.stdout
def test_modification_pipeline(self, script):
script.pip(
"config", "get", "test.blah", expect_error=True
)
script.pip("config", "set", "test.blah", "1")
result4 = script.pip("config", "get", "test.blah")
assert result4.stdout.strip() == "1"
def test_listing_is_correct(self, script):
script.pip("config", "set", "test.listing-beta", "2")
script.pip("config", "set", "test.listing-alpha", "1")
script.pip("config", "set", "test.listing-gamma", "3")
result = script.pip("config", "list")
lines = list(filter(
lambda x: x.startswith("test.listing-"),
result.stdout.splitlines()
))
expected = """
test.listing-alpha='1'
test.listing-beta='2'
test.listing-gamma='3'
"""
assert lines == textwrap.dedent(expected).strip().splitlines()
| <commit_before>"""Tests for the config command
"""
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
def test_reads_user_file(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "--list")
assert "test.hello = 1" in result.stdout
<commit_msg>Add basic tests for configuration<commit_after>"""Tests for the config command
"""
import pytest
import textwrap
from pip.status_codes import ERROR
from tests.lib.configuration_helpers import kinds, ConfigurationFileIOMixin
def test_no_options_passed_should_error(script):
result = script.pip('config', expect_error=True)
assert result.returncode == ERROR
class TestBasicLoading(ConfigurationFileIOMixin):
@pytest.mark.skip("Can't modify underlying file for any mode")
def test_reads_file_appropriately(self, script):
contents = """
[test]
hello = 1
"""
with self.patched_file(kinds.USER, contents):
result = script.pip("config", "list")
assert "test.hello=1" in result.stdout
def test_modification_pipeline(self, script):
script.pip(
"config", "get", "test.blah", expect_error=True
)
script.pip("config", "set", "test.blah", "1")
result4 = script.pip("config", "get", "test.blah")
assert result4.stdout.strip() == "1"
def test_listing_is_correct(self, script):
script.pip("config", "set", "test.listing-beta", "2")
script.pip("config", "set", "test.listing-alpha", "1")
script.pip("config", "set", "test.listing-gamma", "3")
result = script.pip("config", "list")
lines = list(filter(
lambda x: x.startswith("test.listing-"),
result.stdout.splitlines()
))
expected = """
test.listing-alpha='1'
test.listing-beta='2'
test.listing-gamma='3'
"""
assert lines == textwrap.dedent(expected).strip().splitlines()
|
5b71b9e86dc09fe21717a75e45748a81d833c632 | src/test-python.py | src/test-python.py | def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
| def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
elif sys.platform == 'linux2' and (2, 4) <= sys.version_info < (2, 5):
output = Popen([python, "-c", "import socket; print (hasattr(socket, 'ssl'))"], stdout=PIPE).communicate()[0]
if not output.startswith("True"):
raise IOError("Your python at %s doesn't have ssl support, got: %s" % (python, output))
| Check if the installed python2.4 have ssl support. | Check if the installed python2.4 have ssl support.
| Python | mit | upiq/plonebuild,upiq/plonebuild | def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
Check if the installed python2.4 have ssl support. | def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
elif sys.platform == 'linux2' and (2, 4) <= sys.version_info < (2, 5):
output = Popen([python, "-c", "import socket; print (hasattr(socket, 'ssl'))"], stdout=PIPE).communicate()[0]
if not output.startswith("True"):
raise IOError("Your python at %s doesn't have ssl support, got: %s" % (python, output))
| <commit_before>def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
<commit_msg>Check if the installed python2.4 have ssl support.<commit_after> | def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
elif sys.platform == 'linux2' and (2, 4) <= sys.version_info < (2, 5):
output = Popen([python, "-c", "import socket; print (hasattr(socket, 'ssl'))"], stdout=PIPE).communicate()[0]
if not output.startswith("True"):
raise IOError("Your python at %s doesn't have ssl support, got: %s" % (python, output))
| def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
Check if the installed python2.4 have ssl support.def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
elif sys.platform == 'linux2' and (2, 4) <= sys.version_info < (2, 5):
output = Popen([python, "-c", "import socket; print (hasattr(socket, 'ssl'))"], stdout=PIPE).communicate()[0]
if not output.startswith("True"):
raise IOError("Your python at %s doesn't have ssl support, got: %s" % (python, output))
| <commit_before>def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
<commit_msg>Check if the installed python2.4 have ssl support.<commit_after>def test(options, buildout):
from subprocess import Popen, PIPE
import os
import sys
python = options['python']
if not os.path.exists(python):
raise IOError("There is no file at %s" % python)
if sys.platform == 'darwin':
output = Popen([python, "-c", "import platform; print (platform.mac_ver())"], stdout=PIPE).communicate()[0]
if not output.startswith("('10."):
raise IOError("Your python at %s doesn't return proper data for platform.mac_ver(), got: %s" % (python, output))
elif sys.platform == 'linux2' and (2, 4) <= sys.version_info < (2, 5):
output = Popen([python, "-c", "import socket; print (hasattr(socket, 'ssl'))"], stdout=PIPE).communicate()[0]
if not output.startswith("True"):
raise IOError("Your python at %s doesn't have ssl support, got: %s" % (python, output))
|
b3b28bd582d3f1e2ed5e646275760d1d0669acea | WikimediaUtilities.py | WikimediaUtilities.py | from urllib.request import urlopen
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = Utilities.percentEncode(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None | from urllib.request import urlopen, quote
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = quote(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None | Use python's built in system for percent-encoding | Use python's built in system for percent-encoding
| Python | mit | alset333/PeopleLookerUpper | from urllib.request import urlopen
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = Utilities.percentEncode(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, NoneUse python's built in system for percent-encoding | from urllib.request import urlopen, quote
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = quote(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None | <commit_before>from urllib.request import urlopen
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = Utilities.percentEncode(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None<commit_msg>Use python's built in system for percent-encoding<commit_after> | from urllib.request import urlopen, quote
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = quote(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None | from urllib.request import urlopen
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = Utilities.percentEncode(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, NoneUse python's built in system for percent-encodingfrom urllib.request import urlopen, quote
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = quote(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None | <commit_before>from urllib.request import urlopen
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = Utilities.percentEncode(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None<commit_msg>Use python's built in system for percent-encoding<commit_after>from urllib.request import urlopen, quote
import Utilities
FILENAME_CUE = "File:"
IMAGE_LOCATION_CUE = '<div class="fullMedia"><a href="https://upload.wikimedia.org/wikipedia/commons/'
IMAGE_LOCATION_URL_START = 'https://upload.wikimedia.org/wikipedia/commons/'
def directUrlOfFile(mediaPageURL):
"""Returns (success, url)"""
filenameStart = mediaPageURL.find(FILENAME_CUE) + len(FILENAME_CUE)
filename = mediaPageURL[filenameStart:]
filename_percent_encoded = quote(filename)
print(filename, filename_percent_encoded)
lines = urlopen(mediaPageURL).readlines()
for item in lines:
item = item.decode('utf-8')
item = item.replace('href="//', 'href="https://')
if item.find(IMAGE_LOCATION_CUE) == 0\
and filename_percent_encoded.replace('_','').replace(' ','') in item.replace('_','').replace(' ',''): # Remove spaces and underscores when checking, they seem inconsistent
indexOfCueEnd = item.index(IMAGE_LOCATION_CUE) + len(IMAGE_LOCATION_CUE)
image_location_short = item[indexOfCueEnd : item.find('"', indexOfCueEnd)]
image_location_full = IMAGE_LOCATION_URL_START + image_location_short
return True, image_location_full
return False, None |
3ea008feb5ebd0e4e67952267aa5e3a0c5e13e89 | hoomd/operations.py | hoomd/operations.py | import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
| import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
@property
def integrator(self):
try:
return self._integrator
except AttributeError:
return None
| Add integrator property for Operations | Add integrator property for Operations
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
Add integrator property for Operations | import hoomd.integrate
class Operations:
    """Container for the operations (currently only the integrator) that
    act on a `Simulation`.

    Operations are collected with `add` and attached to the simulation's
    C++ backend when `schedule` is called.
    """

    def __init__(self, simulation=None):
        self.simulation = simulation
        # Compute objects returned by operations when they attach.
        self._compute = list()
        self._auto_schedule = False
        self._scheduled = False

    def add(self, op):
        # Accepts only integrators for now; raises ValueError otherwise.
        if isinstance(op, hoomd.integrate._integrator):
            self._integrator = op
        else:
            raise ValueError("Operation is not of the correct type to add to"
                             " Operations.")

    @property
    def _operations(self):
        # Flat list of every operation currently held.
        op = list()
        if hasattr(self, '_integrator'):
            op.append(self._integrator)
        return op

    @property
    def _sys_init(self):
        # True once the owning simulation exists and has an initialized state.
        if self.simulation is None or self.simulation.state is None:
            return False
        else:
            return True

    def schedule(self):
        # Attach each held operation to the simulation backend; the
        # integrator is additionally installed on the C++ system object.
        if not self._sys_init:
            raise RuntimeError("System not initialized yet")
        sim = self.simulation
        for op in self._operations:
            new_objs = op.attach(sim)
            if isinstance(op, hoomd.integrate._integrator):
                sim._cpp_sys.setIntegrator(op._cpp_obj)
            if new_objs is not None:
                self._compute.extend(new_objs)
        self._scheduled = True

    def _store_reader(self, reader):
        # TODO
        pass

    @property
    def scheduled(self):
        # Whether schedule() has completed at least once.
        return self._scheduled

    @property
    def integrator(self):
        # Read-only access to the integrator set via add(); None if unset.
        try:
            return self._integrator
        except AttributeError:
            return None
| <commit_before>import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
<commit_msg>Add integrator property for Operations<commit_after> | import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
@property
def integrator(self):
try:
return self._integrator
except AttributeError:
return None
| import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
Add integrator property for Operationsimport hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
@property
def integrator(self):
try:
return self._integrator
except AttributeError:
return None
| <commit_before>import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
<commit_msg>Add integrator property for Operations<commit_after>import hoomd.integrate
class Operations:
def __init__(self, simulation=None):
self.simulation = simulation
self._compute = list()
self._auto_schedule = False
self._scheduled = False
def add(self, op):
if isinstance(op, hoomd.integrate._integrator):
self._integrator = op
else:
raise ValueError("Operation is not of the correct type to add to"
" Operations.")
@property
def _operations(self):
op = list()
if hasattr(self, '_integrator'):
op.append(self._integrator)
return op
@property
def _sys_init(self):
if self.simulation is None or self.simulation.state is None:
return False
else:
return True
def schedule(self):
if not self._sys_init:
raise RuntimeError("System not initialized yet")
sim = self.simulation
for op in self._operations:
new_objs = op.attach(sim)
if isinstance(op, hoomd.integrate._integrator):
sim._cpp_sys.setIntegrator(op._cpp_obj)
if new_objs is not None:
self._compute.extend(new_objs)
self._scheduled = True
def _store_reader(self, reader):
# TODO
pass
@property
def scheduled(self):
return self._scheduled
@property
def integrator(self):
try:
return self._integrator
except AttributeError:
return None
|
b6e532f01d852738f40eb8bedc89f5c056b2f62c | netbox/generate_secret_key.py | netbox/generate_secret_key.py | #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import random
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
secure_random = random.SystemRandom()
print(''.join(secure_random.sample(charset, 50)))
| #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
print(''.join(secrets.choice(charset) for _ in range(50)))
| Fix how SECRET_KEY is generated | Fix how SECRET_KEY is generated
Use secrets.choice instead of random.sample to generate the secret key. | Python | apache-2.0 | digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox | #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets

# Character set deliberately avoids quotes and backslashes so the key can be
# pasted into a configuration file without escaping.
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
# secrets.choice draws independently (with replacement) from the OS CSPRNG.
# The previous random.SystemRandom().sample() picked 50 *distinct* characters,
# which silently forbade repeats and therefore reduced the key's entropy.
print(''.join(secrets.choice(charset) for _ in range(50)))
Fix how SECRET_KEY is generated
Use secrets.choice instead of random.sample to generate the secret key. | #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
print(''.join(secrets.choice(charset) for _ in range(50)))
| <commit_before>#!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import random
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
secure_random = random.SystemRandom()
print(''.join(secure_random.sample(charset, 50)))
<commit_msg>Fix how SECRET_KEY is generated
Use secrets.choice instead of random.sample to generate the secret key.<commit_after> | #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
print(''.join(secrets.choice(charset) for _ in range(50)))
| #!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import random
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
secure_random = random.SystemRandom()
print(''.join(secure_random.sample(charset, 50)))
Fix how SECRET_KEY is generated
Use secrets.choice instead of random.sample to generate the secret key.#!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
print(''.join(secrets.choice(charset) for _ in range(50)))
| <commit_before>#!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import random
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
secure_random = random.SystemRandom()
print(''.join(secure_random.sample(charset, 50)))
<commit_msg>Fix how SECRET_KEY is generated
Use secrets.choice instead of random.sample to generate the secret key.<commit_after>#!/usr/bin/env python
# This script will generate a random 50-character string suitable for use as a SECRET_KEY.
import secrets
charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*(-_=+)'
print(''.join(secrets.choice(charset) for _ in range(50)))
|
7f649a9e4e90587bd88b4b83b648f76287610f16 | pytest_django_haystack.py | pytest_django_haystack.py | import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request):
    """
    Implement the 'haystack' marker.

    This rebuilds the index at the start of each test and clears it at the end.
    """
    marker = request.keywords.get('haystack', None)
    if marker:
        # Deferred imports so unmarked tests never touch Django at all.
        from pytest_django.lazy_django import skip_if_no_django
        from django.core.management import call_command
        # Request the 'db' fixture lazily: declaring it as a parameter of
        # this autouse fixture would force database setup for EVERY test,
        # not just the ones marked with @pytest.mark.haystack.
        request.getfixturevalue('db')
        def clear_index():
            call_command('clear_index', interactive=False)
        # Skip if Django is not configured
        skip_if_no_django()
        # Register cleanup first so the index is cleared even if the
        # rebuild itself fails mid-test.
        request.addfinalizer(clear_index)
        call_command('rebuild_index', interactive=False)
| import pytest
__version__ = '0.1.1'
def pytest_configure(config):
    """Register the ``haystack`` marker with pytest's configuration."""
    description = (
        'haystack: Mark the test as using the django-haystack search engine, '
        'rebuilding the index for each test.'
    )
    config.addinivalue_line('markers', description)
@pytest.fixture(autouse=True)
def _haystack_marker(request):
    """
    Implement the 'haystack' marker.

    This rebuilds the index at the start of each test and clears it at the end.
    """
    # autouse fixture: runs for every test, but only does work when the test
    # carries the @pytest.mark.haystack marker.
    marker = request.keywords.get('haystack', None)
    if marker:
        # Deferred imports so unmarked tests never touch Django at all.
        from pytest_django.lazy_django import skip_if_no_django
        from django.core.management import call_command
        # Pull in pytest-django's 'db' fixture on demand so database setup
        # happens only for marked tests.  NOTE(review): getfuncargvalue is
        # the old spelling of getfixturevalue — confirm the pytest version.
        request.getfuncargvalue('db')
        def clear_index():
            call_command('clear_index', interactive=False)
        # Skip if Django is not configured
        skip_if_no_django()
        # Finalizer is registered before rebuilding so the index is cleared
        # even if the rebuild itself fails mid-test.
        request.addfinalizer(clear_index)
        call_command('rebuild_index', interactive=False)
| Move db fixture to the inside of the method | Move db fixture to the inside of the method
| Python | mit | rouge8/pytest-django-haystack | import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request, db):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
Move db fixture to the inside of the method | import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
request.getfuncargvalue('db')
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
| <commit_before>import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request, db):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
<commit_msg>Move db fixture to the inside of the method<commit_after> | import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
request.getfuncargvalue('db')
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
| import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request, db):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
Move db fixture to the inside of the methodimport pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
request.getfuncargvalue('db')
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
| <commit_before>import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request, db):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
<commit_msg>Move db fixture to the inside of the method<commit_after>import pytest
__version__ = '0.1.1'
def pytest_configure(config):
# Register the marks
config.addinivalue_line(
'markers',
'haystack: Mark the test as using the django-haystack search engine, '
'rebuilding the index for each test.')
@pytest.fixture(autouse=True)
def _haystack_marker(request):
"""
Implement the 'haystack' marker.
This rebuilds the index at the start of each test and clears it at the end.
"""
marker = request.keywords.get('haystack', None)
if marker:
from pytest_django.lazy_django import skip_if_no_django
from django.core.management import call_command
request.getfuncargvalue('db')
def clear_index():
call_command('clear_index', interactive=False)
# Skip if Django is not configured
skip_if_no_django()
request.addfinalizer(clear_index)
call_command('rebuild_index', interactive=False)
|
e1ddf1806cf80bf14a6ebe5a2d928f375943a9e4 | alignak_backend/__init__.py | alignak_backend/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| Fix bad indentation that broke the PEP8 ! | Fix bad indentation that broke the PEP8 !
| Python | agpl-3.0 | Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend,Alignak-monitoring-contrib/alignak-backend | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
Fix bad indentation that broke the PEP8 ! | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
<commit_msg>Fix bad indentation that broke the PEP8 !<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
Fix bad indentation that broke the PEP8 !#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
<commit_msg>Fix bad indentation that broke the PEP8 !<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend
This module is an Alignak REST backend
"""
# Application version and manifest
VERSION = (0, 4, 3)
__application__ = u"Alignak_Backend"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak REST backend"
__releasenotes__ = u"""Alignak REST Backend"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
|
cf836d147c3f55261e41815fb1c5e0a4bd53d41a | resolver_test/__init__.py | resolver_test/__init__.py | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
    """Return a stub callable that raises *exception* when invoked.

    Defaults to ``AssertionError('die called')``.  The stub accepts any
    positional AND keyword arguments: previously it only took positionals,
    so substituting it for a callable invoked with keyword arguments raised
    a spurious TypeError instead of the intended exception.
    """
    if exception is None:
        exception = AssertionError('die called')
    def inner_die(*_, **__):
        raise exception
    return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
| # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
    """Extra assertion helpers mixed into Resolver test cases."""

    def assertCalledOnce(self, mock, *args, **kwargs):
        """Assert *mock* was called exactly once, with the given arguments."""
        if mock.call_args_list == []:
            self.fail('Not called')
        self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])

    def assert_decorated_with(self, fn, decorator):
        """Assert *fn* records *decorator*'s name in its ``decorated_by`` list."""
        # NOTE: this method used to be defined twice, verbatim; the second
        # definition silently overwrote the first and has been removed.
        self.assertIn(decorator.__name__, fn.decorated_by)

    def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
        """Assert *actual* falls within *minutes_leeway* minutes of *expected*."""
        self.assertTrue(
            expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
            "%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
        )
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
    """Base TestCase combining unittest with Resolver's assertion mixins."""
    # Show full diffs on assertion failures instead of truncating them.
    maxDiff = None
def die(exception=None):
    """Build a stub that raises *exception* no matter what arguments it
    receives; defaults to ``AssertionError('die called')``."""
    err = AssertionError('die called') if exception is None else exception

    def inner_die(*_, **__):
        raise err

    return inner_die
def is_iterable(seq):
    """Report whether *seq* exposes an ``__iter__`` attribute."""
    _missing = object()
    return getattr(seq, "__iter__", _missing) is not _missing
| Allow arbitrary kwargs for die utility function. by: Glenn, Giles | Allow arbitrary kwargs for die utility function. by: Glenn, Giles | Python | mit | pythonanywhere/resolver_test | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
Allow arbitrary kwargs for die utility function. by: Glenn, Giles | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_, **__):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
| <commit_before># Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
<commit_msg>Allow arbitrary kwargs for die utility function. by: Glenn, Giles<commit_after> | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_, **__):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
| # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
Allow arbitrary kwargs for die utility function. by: Glenn, Giles# Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_, **__):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
| <commit_before># Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
<commit_msg>Allow arbitrary kwargs for die utility function. by: Glenn, Giles<commit_after># Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
try:
import unittest2 as unittest
except ImportError:
import unittest
from datetime import timedelta
from mock import call
class ResolverTestMixins(object):
def assertCalledOnce(self, mock, *args, **kwargs):
if mock.call_args_list == []:
self.fail('Not called')
self.assertEquals(mock.call_args_list, [call(*args, **kwargs)])
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_decorated_with(self, fn, decorator):
self.assertIn(decorator.__name__, fn.decorated_by)
def assert_datetime_approximately_equals(self, actual, expected, minutes_leeway=10):
self.assertTrue(
expected - timedelta(minutes=minutes_leeway) <= actual <= expected + timedelta(minutes=minutes_leeway),
"%r is not within %s minutes of %r" % (actual, minutes_leeway, expected)
)
class ResolverTestCase(unittest.TestCase, ResolverTestMixins):
maxDiff = None
def die(exception=None):
if exception is None:
exception = AssertionError('die called')
def inner_die(*_, **__):
raise exception
return inner_die
def is_iterable(seq):
return hasattr(seq, "__iter__")
|
fc37b45a461d8973f78a359016a458b5a3769689 | masters/master.client.v8.ports/master_site_config.py | masters/master.client.v8.ports/master_site_config.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json' | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3a):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
| Switch new ports master to master3a | V8: Switch new ports master to master3a
BUG=595708
TBR=tandrii@chromium.org
Review URL: https://codereview.chromium.org/1854673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299638 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'V8: Switch new ports master to master3a
BUG=595708
TBR=tandrii@chromium.org
Review URL: https://codereview.chromium.org/1854673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299638 0039d316-1c4b-4281-b951-d872f2087c98 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3a):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
| <commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'<commit_msg>V8: Switch new ports master to master3a
BUG=595708
TBR=tandrii@chromium.org
Review URL: https://codereview.chromium.org/1854673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299638 0039d316-1c4b-4281-b951-d872f2087c98<commit_after> | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3a):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
| # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'V8: Switch new ports master to master3a
BUG=595708
TBR=tandrii@chromium.org
Review URL: https://codereview.chromium.org/1854673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299638 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3a):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
| <commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'<commit_msg>V8: Switch new ports master to master3a
BUG=595708
TBR=tandrii@chromium.org
Review URL: https://codereview.chromium.org/1854673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@299638 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""ActiveMaster definition."""
from config_bootstrap import Master
class V8Ports(Master.Master3a):
base_app_url = 'https://v8-status.appspot.com'
tree_status_url = base_app_url + '/status'
store_revisions_url = base_app_url + '/revisions'
last_good_url = base_app_url + '/lkgr'
project_name = 'V8 Ports'
master_port_id = 17
project_url = 'https://developers.google.com/v8/'
buildbot_url = 'http://build.chromium.org/p/client.v8.ports/'
service_account_file = 'service-account-v8.json'
|
b145b03b2569f4a82adefe57e843ef91384c47a4 | panoptes/state_machine/states/core.py | panoptes/state_machine/states/core.py | import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg) | import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
raise NotImplementedError(msg)
| Raise exception for state not overriding main | Raise exception for state not overriding main
| Python | mit | joshwalawender/POCS,panoptes/POCS,panoptes/POCS,joshwalawender/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,joshwalawender/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS | import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)Raise exception for state not overriding main | import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
raise NotImplementedError(msg)
| <commit_before>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)<commit_msg>Raise exception for state not overriding main<commit_after> | import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
raise NotImplementedError(msg)
| import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)Raise exception for state not overriding mainimport time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
raise NotImplementedError(msg)
| <commit_before>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
assert self.panoptes is not None
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
self.panoptes.logger.warning(msg)<commit_msg>Raise exception for state not overriding main<commit_after>import time
import transitions
from panoptes.utils.logger import has_logger
@has_logger
class PanState(transitions.State):
""" Base class for PANOPTES transitions """
def __init__(self, *args, **kwargs):
name = kwargs.get('name', self.__class__)
self.panoptes = kwargs.get('panoptes', None)
super().__init__(name=name, on_enter=['execute'])
self._sleep_delay = 3 # seconds
def main(self, event_data):
msg = "Must implement `main` method inside class {}. Exiting".format(self.name)
raise NotImplementedError(msg)
|
3cfa4f48c6bf28ed4273004d9a44173ecb4b195c | parliament/templatetags/parliament.py | parliament/templatetags/parliament.py | from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
| from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
| Allow governing templatetag to be called with a Statement object | Allow governing templatetag to be called with a Statement object
| Python | agpl-3.0 | kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu,kansanmuisti/kamu | from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
Allow governing templatetag to be called with a Statement object | from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
| <commit_before>from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
<commit_msg>Allow governing templatetag to be called with a Statement object<commit_after> | from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
| from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
Allow governing templatetag to be called with a Statement objectfrom django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
| <commit_before>from django import template
register = template.Library()
@register.filter(name='governing')
def governing(party, date):
return party.is_governing(date)
<commit_msg>Allow governing templatetag to be called with a Statement object<commit_after>from django import template
from ..models import Party, Statement
register = template.Library()
@register.filter(name='governing')
def governing(obj, date=None):
if isinstance(obj, Party):
assert date is not None, "Date must be supplied when 'govern' is called with a Party object"
return obj.is_governing(date)
elif isinstance(obj, Statement):
if obj.member is None:
if 'ministeri' in obj.speaker_role:
return True
else:
return False
if date is None:
date = obj.item.plsess.date
return obj.member.party.is_governing(date)
|
5c074950663d2e508fee0e015472e8460bf5b183 | rootpy/plotting/canvas.py | rootpy/plotting/canvas.py | """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
| """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
| Remove code which should never have made it in | Remove code which should never have made it in
| Python | bsd-3-clause | rootpy/rootpy,kreczko/rootpy,kreczko/rootpy,kreczko/rootpy,rootpy/rootpy,ndawe/rootpy,rootpy/rootpy,ndawe/rootpy,ndawe/rootpy | """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
Remove code which should never have made it in | """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
| <commit_before>"""
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
<commit_msg>Remove code which should never have made it in<commit_after> | """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
| """
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
Remove code which should never have made it in"""
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
| <commit_before>"""
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ctypes, ctypes.util
ctypes.cdll.LoadLibrary(ctypes.util.find_library("Gui"))
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
<commit_msg>Remove code which should never have made it in<commit_after>"""
This module implements python classes which inherit from
and extend the functionality of the ROOT canvas classes.
"""
import ROOT
from ..core import Object
from .. import rootpy_globals as _globals
from .. import defaults, QROOT
class _PadBase(Object):
def _post_init(self):
self.members = []
_globals.pad = self
def Clear(self, *args, **kwargs):
self.members = []
self.ROOT_base.Clear(self, *args, **kwargs)
def OwnMembers(self):
for thing in self.GetListOfPrimitives():
if thing not in self.members:
self.members.append(thing)
def cd(self, *args):
_globals.pad = self
return self.ROOT_base.cd(self, *args)
class Pad(_PadBase, QROOT.TPad):
def __init__(self, *args, **kwargs):
ROOT.TPad.__init__(self, *args, **kwargs)
self._post_init()
class Canvas(_PadBase, QROOT.TCanvas):
def __init__(self,
width=defaults.CANVAS_WIDTH,
height=defaults.CANVAS_HEIGHT,
xpos=0, ypos=0, name=None, title=None):
Object.__init__(self, name, title, xpos, ypos, width, height)
self._post_init()
|
fc6ca51d4a865368f82c26426a2d6c8d8366e25d | tcconfig/tcshow.py | tcconfig/tcshow.py | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import sys
try:
import json
except ImportError:
import simplejson as json
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import json
import sys
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
    """Define the command-line interface and return the parsed options.

    Requires at least one ``--device`` argument; the option may be
    repeated to query several network devices.
    """
    arg_parser = thutils.option.ArgumentParserObject()
    arg_parser.make(version=tcconfig.VERSION)
    tc_group = arg_parser.add_argument_group("Traffic Control")
    tc_group.add_argument(
        "--device", action="append", required=True,
        help="network device name (e.g. eth0)")
    return arg_parser.parse_args()
@thutils.main.Main
def main():
    """Print the traffic-control parameters of each requested device as JSON.

    Returns 0 on success; exits early via thutils helpers when the ``tc``
    command is missing or a device name is invalid.
    """
    options = parse_option()
    thutils.initialize_library(__file__, options)
    # The "tc" command must be installed before anything can be queried.
    thutils.common.verify_install_command(["tc"])
    runner = thutils.subprocwrapper.SubprocessWrapper()
    parameters = {}
    for device_name in options.device:
        verify_network_interface(device_name)
        controller = tcconfig.traffic_control.TrafficControl(
            runner, device_name)
        parameters.update(controller.get_tc_parameter())
    six.print_(json.dumps(parameters, indent=4))
    return 0
if __name__ == '__main__':
sys.exit(main())
| Drop support for Python 2.6 | Drop support for Python 2.6
| Python | mit | thombashi/tcconfig,thombashi/tcconfig | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import sys
try:
import json
except ImportError:
import simplejson as json
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
Drop support for Python 2.6 | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import json
import sys
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import sys
try:
import json
except ImportError:
import simplejson as json
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Drop support for Python 2.6<commit_after> | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import json
import sys
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import sys
try:
import json
except ImportError:
import simplejson as json
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
Drop support for Python 2.6#!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import json
import sys
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
| <commit_before>#!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import sys
try:
import json
except ImportError:
import simplejson as json
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Drop support for Python 2.6<commit_after>#!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from __future__ import with_statement
import json
import sys
import six
import thutils
import tcconfig
import tcconfig.traffic_control
from ._common import verify_network_interface
def parse_option():
parser = thutils.option.ArgumentParserObject()
parser.make(version=tcconfig.VERSION)
group = parser.add_argument_group("Traffic Control")
group.add_argument(
"--device", action="append", required=True,
help="network device name (e.g. eth0)")
return parser.parse_args()
@thutils.main.Main
def main():
options = parse_option()
thutils.initialize_library(__file__, options)
thutils.common.verify_install_command(["tc"])
subproc_wrapper = thutils.subprocwrapper.SubprocessWrapper()
tc_param = {}
for device in options.device:
verify_network_interface(device)
tc = tcconfig.traffic_control.TrafficControl(
subproc_wrapper, device)
tc_param.update(tc.get_tc_parameter())
six.print_(json.dumps(tc_param, indent=4))
return 0
if __name__ == '__main__':
sys.exit(main())
|
2b892b58049bd2b99ae97b62149f88c8001c82ca | ceph_deploy/tests/test_cli_osd.py | ceph_deploy/tests/test_cli_osd.py | import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with tmpdir.join('ceph.conf').open('w'):
pass
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
| import pytest
import subprocess
def test_help(tmpdir, cli):
    """--help on the osd subcommand prints a full usage summary."""
    with cli(
        args=['ceph-deploy', 'osd', '--help'],
        stdout=subprocess.PIPE,
    ) as proc:
        output = proc.stdout.read()
        assert 'usage: ceph-deploy osd' in output
        assert 'positional arguments' in output
        assert 'optional arguments' in output
def test_bad_subcommand(tmpdir, cli):
    """An unknown osd action fails with exit status 2 and a usage error."""
    with pytest.raises(cli.Failed) as excinfo:
        with cli(
            args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
            stderr=subprocess.PIPE,
        ) as proc:
            output = proc.stderr.read()
            assert 'ceph-deploy osd: error' in output
            assert 'invalid choice' in output
    assert excinfo.value.status == 2
def test_bad_no_disk(tmpdir, cli):
    """Invoking osd with no disk argument fails with exit status 2."""
    with pytest.raises(cli.Failed) as excinfo:
        with cli(
            args=['ceph-deploy', 'osd'],
            stderr=subprocess.PIPE,
        ) as proc:
            output = proc.stderr.read()
            assert 'usage: ceph-deploy osd' in output
            assert 'too few arguments' in output
    assert excinfo.value.status == 2
| Remove unneeded creation of .conf file | [RM-11742] Remove unneeded creation of .conf file
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>
| Python | mit | trhoden/ceph-deploy,branto1/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,branto1/ceph-deploy,ceph/ceph-deploy,SUSE/ceph-deploy,isyippee/ceph-deploy,ghxandsky/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy,codenrhoden/ceph-deploy,shenhequnying/ceph-deploy,zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ghxandsky/ceph-deploy,imzhulei/ceph-deploy,trhoden/ceph-deploy,zhouyuan/ceph-deploy,Vicente-Cheng/ceph-deploy,isyippee/ceph-deploy,osynge/ceph-deploy,SUSE/ceph-deploy,osynge/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,Vicente-Cheng/ceph-deploy,ceph/ceph-deploy | import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with tmpdir.join('ceph.conf').open('w'):
pass
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
[RM-11742] Remove unneeded creation of .conf file
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com> | import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
| <commit_before>import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with tmpdir.join('ceph.conf').open('w'):
pass
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
<commit_msg>[RM-11742] Remove unneeded creation of .conf file
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after> | import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
| import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with tmpdir.join('ceph.conf').open('w'):
pass
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
[RM-11742] Remove unneeded creation of .conf file
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com>import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
| <commit_before>import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with tmpdir.join('ceph.conf').open('w'):
pass
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
<commit_msg>[RM-11742] Remove unneeded creation of .conf file
Signed-off-by: Travis Rhoden <e5e44d6dbac12e32e01c3bb8b67940d8b42e225b@redhat.com><commit_after>import pytest
import subprocess
def test_help(tmpdir, cli):
with cli(
args=['ceph-deploy', 'osd', '--help'],
stdout=subprocess.PIPE,
) as p:
result = p.stdout.read()
assert 'usage: ceph-deploy osd' in result
assert 'positional arguments' in result
assert 'optional arguments' in result
def test_bad_subcommand(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd', 'fakehost:/does-not-exist'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'ceph-deploy osd: error' in result
assert 'invalid choice' in result
assert err.value.status == 2
def test_bad_no_disk(tmpdir, cli):
with pytest.raises(cli.Failed) as err:
with cli(
args=['ceph-deploy', 'osd'],
stderr=subprocess.PIPE,
) as p:
result = p.stderr.read()
assert 'usage: ceph-deploy osd' in result
assert 'too few arguments' in result
assert err.value.status == 2
|
913590519e05a6209efb1102649ea7aba4abfbf5 | airship/__init__.py | airship/__init__.py | import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
| import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
    """Serialize *obj* to a JSON string.

    When *escaped* is true, every "</" sequence is rewritten as "<\\/" so
    the payload cannot close a surrounding <script> tag when embedded in
    an HTML page.
    """
    body = json.dumps(obj)
    if not escaped:
        return body
    return body.replace("</", "<\\/")
def channels_json(station, escaped=False):
    """Return the station's channels as a JSON array of {"name": ...} objects."""
    names = station.channels()
    return jsonate([{"name": name} for name in names], escaped)
def grefs_json(station, channel, escaped=False):
    """Return the grefs of *channel* as a JSON array of {"name": ...} objects."""
    names = station.grefs(channel)
    return jsonate([{"name": name} for name in names], escaped)
def make_airship(station):
    """Build and return the Flask application serving *station* over HTTP."""
    app = Flask(__name__)

    @app.route("/")
    def index():
        # Embed the channel list directly in the page; escape it so the
        # JSON payload cannot terminate the surrounding <script> tag.
        payload = channels_json(station, True)
        return render_template("index.html", channels_json=payload)

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    @app.route("/grefs/<channel>")
    def list_grefs(channel):
        return grefs_json(station, channel)

    return app
| Fix the grefs route in the airship server | Fix the grefs route in the airship server
| Python | mit | richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation | import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
Fix the grefs route in the airship server | import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return grefs_json(station, channel)
return app
| <commit_before>import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
<commit_msg>Fix the grefs route in the airship server<commit_after> | import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
    """Build the airship Flask application that serves *station*'s data."""
    app = Flask(__name__)

    @app.route("/")
    def index():
        # The template inlines the JSON directly into HTML, so it must use
        # the escaped form.
        return render_template(
            "index.html", channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    @app.route("/grefs/<channel>")
    def list_grefs(channel):
        return grefs_json(station, channel)

    return app
| import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
    """Build the airship Flask application that serves *station*'s data."""
    app = Flask(__name__)

    @app.route("/")
    def index():
        # Escaped JSON because it is embedded directly in the HTML template.
        return render_template(
            "index.html", channels_json=channels_json(station, True))

    @app.route("/channels")
    def list_channels():
        return channels_json(station)

    @app.route("/grefs/<channel>")
    def list_grefs(channel):
        # Bug fix: this view previously returned None, which makes Flask raise
        # "view function did not return a response" on every request.
        # `json` is already imported at the top of this module.
        return json.dumps([{"name": gref} for gref in station.grefs(channel)])

    return app
Fix the grefs route in the airship serverimport os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return grefs_json(station, channel)
return app
| <commit_before>import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
<commit_msg>Fix the grefs route in the airship server<commit_after>import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return grefs_json(station, channel)
return app
|
37953d6ee56fedbe5e03f738ddbf28c3433718e7 | onestop/registry.py | onestop/registry.py | """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path='.'):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
| """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path=None):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
| Fix bug where ONESTOP_REGISTRY env var was not checked | Fix bug where ONESTOP_REGISTRY env var was not checked
| Python | mit | transitland/transitland-python-client,srthurman/transitland-python-client | """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path='.'):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
Fix bug where ONESTOP_REGISTRY env var was not checked | """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path=None):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
| <commit_before>"""Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path='.'):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
<commit_msg>Fix bug where ONESTOP_REGISTRY env var was not checked<commit_after> | """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path=None):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
| """Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path='.'):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
Fix bug where ONESTOP_REGISTRY env var was not checked"""Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path=None):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
| <commit_before>"""Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
    """Onestop Registry: wraps a directory of Onestop feed JSON files."""

    def __init__(self, path=None):
        """Path to directory containing feeds.

        Falls back to the ONESTOP_REGISTRY environment variable, then to the
        current directory.  Bug fix: the previous default of '.' was always
        truthy, so the environment variable was never consulted.
        """
        self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
        if not os.path.exists(os.path.join(self.path, 'feeds')):
            raise errors.OnestopInvalidRegistry(
                'Invalid Onestop Registry directory: %s' % self.path
            )

    def _registered(self, path, prefix):
        # Glob for '<prefix>-*.json'-style files; the registered ID is the
        # basename up to the first '.'.
        return [
            os.path.basename(i).partition('.')[0]
            for i in glob.glob(
                os.path.join(self.path, path, '%s-*.*json' % prefix)
            )
        ]

    def feeds(self):
        """Onestop IDs of all registered feeds."""
        return self._registered('feeds', 'f')

    def feed(self, onestopId):
        """Load a feed by onestopId."""
        filename = os.path.join(self.path, 'feeds', '%s.json' % onestopId)
        with open(filename) as f:
            data = json.load(f)
        return entities.OnestopFeed.from_json(data)
<commit_msg>Fix bug where ONESTOP_REGISTRY env var was not checked<commit_after>"""Read and write Onestop data."""
import sys
import os
import glob
import json
import argparse
import urllib
import mzgeohash
import util
import entities
import errors
class OnestopRegistry(object):
"""Onestop Registry."""
def __init__(self, path=None):
"""Path to directory containing feeds."""
# Path to registry
self.path = path or os.getenv('ONESTOP_REGISTRY') or '.'
if not os.path.exists(os.path.join(self.path, 'feeds')):
raise errors.OnestopInvalidRegistry(
'Invalid Onestop Registry directory: %s'%self.path
)
def _registered(self, path, prefix):
return [
os.path.basename(i).partition('.')[0]
for i in glob.glob(
os.path.join(self.path, path, '%s-*.*json'%prefix)
)
]
def feeds(self):
return self._registered('feeds', 'f')
def feed(self, onestopId):
"""Load a feed by onestopId."""
filename = os.path.join(self.path, 'feeds', '%s.json'%onestopId)
with open(filename) as f:
data = json.load(f)
return entities.OnestopFeed.from_json(data)
|
06b7a81a0c89177e6ac1913cab65819b7b565754 | python/ssc/__init__.py | python/ssc/__init__.py | # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import predictive_dm, decode_dm
from ssc.btc import lin2btc, btc2lin, calc_rc
from ssc.configure import *
| # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import lin2dm, dm2lin, calc_a_value
from ssc.btc import lin2btc, btc2lin, calc_rc
| Update function names from dm.py and removed import to configure.py | Update function names from dm.py and removed import to configure.py
| Python | bsd-3-clause | Zardoz89/Simple-Sound-Codecs,Zardoz89/Simple-Sound-Codecs | # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import predictive_dm, decode_dm
from ssc.btc import lin2btc, btc2lin, calc_rc
from ssc.configure import *
Update function names from dm.py and removed import to configure.py | # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import lin2dm, dm2lin, calc_a_value
from ssc.btc import lin2btc, btc2lin, calc_rc
| <commit_before># outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modualtion
"""
from ssc.aux import pack, unpack
from ssc.dm import predictive_dm, decode_dm
from ssc.btc import lin2btc, btc2lin, calc_rc
from ssc.configure import *
<commit_msg>Update function names from dm.py and removed import to configure.py<commit_after> | # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import lin2dm, dm2lin, calc_a_value
from ssc.btc import lin2btc, btc2lin, calc_rc
| # outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import predictive_dm, decode_dm
from ssc.btc import lin2btc, btc2lin, calc_rc
from ssc.configure import *
Update function names from dm.py and removed import to configure.py# outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import lin2dm, dm2lin, calc_a_value
from ssc.btc import lin2btc, btc2lin, calc_rc
| <commit_before># outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import predictive_dm, decode_dm
from ssc.btc import lin2btc, btc2lin, calc_rc
from ssc.configure import *
<commit_msg>Update function names from dm.py and removed import to configure.py<commit_after># outer __init__.py
"""
Implementation of some simple and dumb audio codecs, like Delta Modulation
"""
from ssc.aux import pack, unpack
from ssc.dm import lin2dm, dm2lin, calc_a_value
from ssc.btc import lin2btc, btc2lin, calc_rc
|
6d15230f46c22226f6a2e84ac41fc39e6c5c190b | linode/objects/linode/backup.py | linode/objects/linode/backup.py | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| Fix datetime fields in Backup and SupportTicket | Fix datetime fields in Backup and SupportTicket
This closes #23.
| Python | bsd-3-clause | linode/python-linode-api,jo-tez/python-linode-api | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
Fix datetime fields in Backup and SupportTicket
This closes #23. | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| <commit_before>from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
<commit_msg>Fix datetime fields in Backup and SupportTicket
This closes #23.<commit_after> | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
Fix datetime fields in Backup and SupportTicket
This closes #23.from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| <commit_before>from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
<commit_msg>Fix datetime fields in Backup and SupportTicket
This closes #23.<commit_after>from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
|
19c087941e193b79b6f76e75cc024878ef8c7c6f | examples/test.py | examples/test.py | from nanomon import resources
from nanomon import registry
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Host('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
| from nanomon import resources
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Node('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
| Update to use Node instead of Host | Update to use Node instead of Host
| Python | bsd-2-clause | cloudtools/nymms | from nanomon import resources
from nanomon import registry
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Host('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
Update to use Node instead of Host | from nanomon import resources
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Node('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
| <commit_before>from nanomon import resources
from nanomon import registry
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Host('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
<commit_msg>Update to use Node instead of Host<commit_after> | from nanomon import resources
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Node('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
| from nanomon import resources
from nanomon import registry
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Host('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
Update to use Node instead of Hostfrom nanomon import resources
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Node('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
| <commit_before>from nanomon import resources
from nanomon import registry
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Host('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
<commit_msg>Update to use Node instead of Host<commit_after>from nanomon import resources
import logging
logging.basicConfig(level=logging.DEBUG)
webserver_group = resources.MonitoringGroup('webservers', port=80)
www1 = resources.Node('www1', monitoring_groups=[webserver_group,], port=443, type='m1.xlarge')
http_check = resources.Command('check_http',
'check_http {host[address]} {port}', port=80)
http_monitor = resources.Monitor('http_monitor', command=http_check, monitoring_groups=[webserver_group,])
|
8b3538150bbd3aa1dea0ad060b32a35acb80c51a | common/test/acceptance/edxapp_pages/lms/find_courses.py | common/test/acceptance/edxapp_pages/lms/find_courses.py | """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
    """
    Find courses page (main page of the LMS).
    """

    url = BASE_URL

    def is_browser_on_page(self):
        # Bug fix: match by substring instead of strict equality -- the full
        # browser title is not exactly "edX", so the equality check made this
        # page object never report itself as loaded.
        return "edX" in self.browser.title

    @property
    def course_id_list(self):
        """
        Retrieve the list of available course IDs
        on the page.
        """
        return self.css_map('article.course', lambda el: el['id'])
| """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return "edX" in self.browser.title
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
| Fix find courses page title in bok choy test suite | Fix find courses page title in bok choy test suite
| Python | agpl-3.0 | nttks/jenkins-test,Unow/edx-platform,amir-qayyum-khan/edx-platform,nttks/edx-platform,DNFcode/edx-platform,jbassen/edx-platform,morenopc/edx-platform,procangroup/edx-platform,atsolakid/edx-platform,itsjeyd/edx-platform,jazztpt/edx-platform,jamesblunt/edx-platform,eduNEXT/edunext-platform,jruiperezv/ANALYSE,stvstnfrd/edx-platform,cecep-edu/edx-platform,devs1991/test_edx_docmode,openfun/edx-platform,OmarIthawi/edx-platform,Shrhawk/edx-platform,Semi-global/edx-platform,nanolearning/edx-platform,Softmotions/edx-platform,IndonesiaX/edx-platform,shubhdev/openedx,hamzehd/edx-platform,arifsetiawan/edx-platform,mtlchun/edx,hkawasaki/kawasaki-aio8-0,vasyarv/edx-platform,shubhdev/edxOnBaadal,ahmadio/edx-platform,vismartltd/edx-platform,inares/edx-platform,xinjiguaike/edx-platform,J861449197/edx-platform,mushtaqak/edx-platform,jonathan-beard/edx-platform,gsehub/edx-platform,shubhdev/edx-platform,Semi-global/edx-platform,ampax/edx-platform-backup,antonve/s4-project-mooc,chudaol/edx-platform,xuxiao19910803/edx,nanolearning/edx-platform,Unow/edx-platform,jolyonb/edx-platform,hkawasaki/kawasaki-aio8-2,a-parhom/edx-platform,doismellburning/edx-platform,Edraak/circleci-edx-platform,RPI-OPENEDX/edx-platform,wwj718/edx-platform,bigdatauniversity/edx-platform,DefyVentures/edx-platform,ZLLab-Mooc/edx-platform,kxliugang/edx-platform,TeachAtTUM/edx-platform,inares/edx-platform,IndonesiaX/edx-platform,don-github/edx-platform,nanolearning/edx-platform,vismartltd/edx-platform,jazztpt/edx-platform,adoosii/edx-platform,ovnicraft/edx-platform,philanthropy-u/edx-platform,SravanthiSinha/edx-platform,bitifirefly/edx-platform,stvstnfrd/edx-platform,Edraak/edx-platform,eestay/edx-platform,ahmadio/edx-platform,nanolearningllc/edx-platform-cypress,romain-li/edx-platform,pepeportela/edx-platform,doganov/edx-platform,doganov/edx-platform,CredoReference/edx-platform,Endika/edx-platform,jamiefolsom/edx-platform,halvertoluke/edx-platform,franosincic/edx-platform,MSOpenTech/edx-platform
,amir-qayyum-khan/edx-platform,10clouds/edx-platform,RPI-OPENEDX/edx-platform,unicri/edx-platform,jbzdak/edx-platform,ZLLab-Mooc/edx-platform,Softmotions/edx-platform,etzhou/edx-platform,yokose-ks/edx-platform,msegado/edx-platform,mitocw/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,ak2703/edx-platform,tiagochiavericosta/edx-platform,Edraak/edraak-platform,nttks/edx-platform,nikolas/edx-platform,SravanthiSinha/edx-platform,carsongee/edx-platform,zerobatu/edx-platform,4eek/edx-platform,raccoongang/edx-platform,hmcmooc/muddx-platform,alexthered/kienhoc-platform,jelugbo/tundex,etzhou/edx-platform,shurihell/testasia,dkarakats/edx-platform,MSOpenTech/edx-platform,simbs/edx-platform,arbrandes/edx-platform,shurihell/testasia,Stanford-Online/edx-platform,appsembler/edx-platform,iivic/BoiseStateX,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-2,yokose-ks/edx-platform,longmen21/edx-platform,sameetb-cuelogic/edx-platform-test,peterm-itr/edx-platform,openfun/edx-platform,synergeticsedx/deployment-wipro,edry/edx-platform,CourseTalk/edx-platform,edry/edx-platform,SivilTaram/edx-platform,chauhanhardik/populo_2,DefyVentures/edx-platform,shubhdev/openedx,gsehub/edx-platform,rismalrv/edx-platform,devs1991/test_edx_docmode,Edraak/circleci-edx-platform,zadgroup/edx-platform,dsajkl/reqiop,shurihell/testasia,cpennington/edx-platform,cecep-edu/edx-platform,abdoosh00/edraak,Edraak/edraak-platform,hastexo/edx-platform,romain-li/edx-platform,alu042/edx-platform,JCBarahona/edX,pomegranited/edx-platform,procangroup/edx-platform,CourseTalk/edx-platform,ubc/edx-platform,wwj718/edx-platform,louyihua/edx-platform,fly19890211/edx-platform,SivilTaram/edx-platform,caesar2164/edx-platform,kxliugang/edx-platform,Stanford-Online/edx-platform,Edraak/edx-platform,proversity-org/edx-platform,BehavioralInsightsTeam/edx-platform,Shrhawk/edx-platform,WatanabeYasumasa/edx-platform,itsjeyd/edx-platform,CredoReference/edx-platform,teltek/edx-platform,ESOedX/edx-platform,mcgachey/edx-platform,ahmad
iga/min_edx,jbzdak/edx-platform,jolyonb/edx-platform,hkawasaki/kawasaki-aio8-2,mushtaqak/edx-platform,mahendra-r/edx-platform,edx/edx-platform,dsajkl/reqiop,angelapper/edx-platform,pabloborrego93/edx-platform,pku9104038/edx-platform,DNFcode/edx-platform,ferabra/edx-platform,vasyarv/edx-platform,ubc/edx-platform,doismellburning/edx-platform,halvertoluke/edx-platform,shashank971/edx-platform,lduarte1991/edx-platform,jzoldak/edx-platform,abdoosh00/edraak,Semi-global/edx-platform,fintech-circle/edx-platform,nttks/jenkins-test,dkarakats/edx-platform,rhndg/openedx,longmen21/edx-platform,kmoocdev/edx-platform,kmoocdev2/edx-platform,procangroup/edx-platform,cyanna/edx-platform,leansoft/edx-platform,jamiefolsom/edx-platform,xinjiguaike/edx-platform,kmoocdev2/edx-platform,dcosentino/edx-platform,zubair-arbi/edx-platform,Edraak/edraak-platform,jbzdak/edx-platform,ZLLab-Mooc/edx-platform,xuxiao19910803/edx-platform,mjirayu/sit_academy,franosincic/edx-platform,EDUlib/edx-platform,doismellburning/edx-platform,gymnasium/edx-platform,JCBarahona/edX,ferabra/edx-platform,gymnasium/edx-platform,jelugbo/tundex,hkawasaki/kawasaki-aio8-1,UXE/local-edx,shurihell/testasia,martynovp/edx-platform,sameetb-cuelogic/edx-platform-test,pomegranited/edx-platform,utecuy/edx-platform,10clouds/edx-platform,J861449197/edx-platform,eduNEXT/edx-platform,dkarakats/edx-platform,dcosentino/edx-platform,shashank971/edx-platform,longmen21/edx-platform,hastexo/edx-platform,mitocw/edx-platform,MakeHer/edx-platform,eestay/edx-platform,y12uc231/edx-platform,appliedx/edx-platform,IONISx/edx-platform,xinjiguaike/edx-platform,gsehub/edx-platform,lduarte1991/edx-platform,jswope00/GAI,mtlchun/edx,philanthropy-u/edx-platform,zofuthan/edx-platform,jzoldak/edx-platform,appsembler/edx-platform,torchingloom/edx-platform,auferack08/edx-platform,ESOedX/edx-platform,4eek/edx-platform,motion2015/a3,martynovp/edx-platform,jazkarta/edx-platform-for-isc,openfun/edx-platform,bdero/edx-platform,jswope00/GAI,dsajkl/reqiop,zhenzhai/
edx-platform,leansoft/edx-platform,iivic/BoiseStateX,Livit/Livit.Learn.EdX,SravanthiSinha/edx-platform,Stanford-Online/edx-platform,nagyistoce/edx-platform,beacloudgenius/edx-platform,hmcmooc/muddx-platform,WatanabeYasumasa/edx-platform,UXE/local-edx,Softmotions/edx-platform,fintech-circle/edx-platform,antonve/s4-project-mooc,leansoft/edx-platform,shubhdev/edx-platform,jruiperezv/ANALYSE,ahmedaljazzar/edx-platform,nttks/edx-platform,stvstnfrd/edx-platform,carsongee/edx-platform,xuxiao19910803/edx-platform,synergeticsedx/deployment-wipro,mitocw/edx-platform,LICEF/edx-platform,mbareta/edx-platform-ft,simbs/edx-platform,nagyistoce/edx-platform,chrisndodge/edx-platform,valtech-mooc/edx-platform,etzhou/edx-platform,pku9104038/edx-platform,fly19890211/edx-platform,valtech-mooc/edx-platform,valtech-mooc/edx-platform,raccoongang/edx-platform,unicri/edx-platform,alexthered/kienhoc-platform,beacloudgenius/edx-platform,jbassen/edx-platform,MSOpenTech/edx-platform,kxliugang/edx-platform,SravanthiSinha/edx-platform,playm2mboy/edx-platform,sudheerchintala/LearnEraPlatForm,raccoongang/edx-platform,defance/edx-platform,don-github/edx-platform,hkawasaki/kawasaki-aio8-1,sameetb-cuelogic/edx-platform-test,nanolearningllc/edx-platform-cypress,prarthitm/edxplatform,waheedahmed/edx-platform,SravanthiSinha/edx-platform,shubhdev/openedx,kxliugang/edx-platform,JCBarahona/edX,rismalrv/edx-platform,nanolearningllc/edx-platform-cypress,JioEducation/edx-platform,Kalyzee/edx-platform,nanolearningllc/edx-platform-cypress,pepeportela/edx-platform,nanolearning/edx-platform,hkawasaki/kawasaki-aio8-2,ak2703/edx-platform,shabab12/edx-platform,philanthropy-u/edx-platform,10clouds/edx-platform,Shrhawk/edx-platform,edx-solutions/edx-platform,hkawasaki/kawasaki-aio8-1,shubhdev/edxOnBaadal,romain-li/edx-platform,motion2015/a3,ahmadiga/min_edx,hastexo/edx-platform,gymnasium/edx-platform,motion2015/edx-platform,zofuthan/edx-platform,jruiperezv/ANALYSE,prarthitm/edxplatform,vismartltd/edx-platform,kursitet/ed
x-platform,mjirayu/sit_academy,MSOpenTech/edx-platform,nanolearningllc/edx-platform-cypress,tanmaykm/edx-platform,OmarIthawi/edx-platform,ferabra/edx-platform,SivilTaram/edx-platform,solashirai/edx-platform,hamzehd/edx-platform,BehavioralInsightsTeam/edx-platform,bigdatauniversity/edx-platform,motion2015/a3,tanmaykm/edx-platform,chauhanhardik/populo_2,CredoReference/edx-platform,martynovp/edx-platform,morenopc/edx-platform,carsongee/edx-platform,ahmadiga/min_edx,chauhanhardik/populo,Kalyzee/edx-platform,kmoocdev/edx-platform,mtlchun/edx,nikolas/edx-platform,gymnasium/edx-platform,unicri/edx-platform,ahmadio/edx-platform,kmoocdev2/edx-platform,kamalx/edx-platform,AkA84/edx-platform,adoosii/edx-platform,shubhdev/edxOnBaadal,vasyarv/edx-platform,chauhanhardik/populo,peterm-itr/edx-platform,B-MOOC/edx-platform,hastexo/edx-platform,chauhanhardik/populo_2,naresh21/synergetics-edx-platform,andyzsf/edx,IONISx/edx-platform,marcore/edx-platform,benpatterson/edx-platform,jazkarta/edx-platform-for-isc,cselis86/edx-platform,eduNEXT/edx-platform,alu042/edx-platform,prarthitm/edxplatform,eemirtekin/edx-platform,Shrhawk/edx-platform,edx-solutions/edx-platform,nikolas/edx-platform,vikas1885/test1,xuxiao19910803/edx-platform,nttks/edx-platform,mtlchun/edx,zubair-arbi/edx-platform,jjmiranda/edx-platform,benpatterson/edx-platform,xingyepei/edx-platform,tanmaykm/edx-platform,motion2015/edx-platform,zhenzhai/edx-platform,kursitet/edx-platform,cecep-edu/edx-platform,torchingloom/edx-platform,jazkarta/edx-platform,atsolakid/edx-platform,ampax/edx-platform-backup,motion2015/a3,arifsetiawan/edx-platform,rue89-tech/edx-platform,chand3040/cloud_that,dcosentino/edx-platform,leansoft/edx-platform,doganov/edx-platform,jelugbo/tundex,edry/edx-platform,kursitet/edx-platform,polimediaupv/edx-platform,Lektorium-LLC/edx-platform,fly19890211/edx-platform,beni55/edx-platform,kmoocdev2/edx-platform,proversity-org/edx-platform,kamalx/edx-platform,mahendra-r/edx-platform,defance/edx-platform,longmen21/edx-
platform,vasyarv/edx-platform,shubhdev/openedx,solashirai/edx-platform,don-github/edx-platform,philanthropy-u/edx-platform,JioEducation/edx-platform,MakeHer/edx-platform,edry/edx-platform,jazztpt/edx-platform,zerobatu/edx-platform,TeachAtTUM/edx-platform,wwj718/ANALYSE,romain-li/edx-platform,analyseuc3m/ANALYSE-v1,eemirtekin/edx-platform,bitifirefly/edx-platform,olexiim/edx-platform,alexthered/kienhoc-platform,antonve/s4-project-mooc,chrisndodge/edx-platform,inares/edx-platform,B-MOOC/edx-platform,xuxiao19910803/edx-platform,bigdatauniversity/edx-platform,shashank971/edx-platform,xingyepei/edx-platform,andyzsf/edx,rismalrv/edx-platform,simbs/edx-platform,atsolakid/edx-platform,CredoReference/edx-platform,jjmiranda/edx-platform,caesar2164/edx-platform,JCBarahona/edX,pabloborrego93/edx-platform,OmarIthawi/edx-platform,msegado/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,motion2015/a3,ampax/edx-platform-backup,teltek/edx-platform,ubc/edx-platform,deepsrijit1105/edx-platform,edx/edx-platform,UOMx/edx-platform,etzhou/edx-platform,auferack08/edx-platform,Semi-global/edx-platform,cpennington/edx-platform,synergeticsedx/deployment-wipro,fintech-circle/edx-platform,benpatterson/edx-platform,auferack08/edx-platform,Edraak/circleci-edx-platform,ahmadio/edx-platform,utecuy/edx-platform,y12uc231/edx-platform,zhenzhai/edx-platform,appsembler/edx-platform,cognitiveclass/edx-platform,utecuy/edx-platform,teltek/edx-platform,nanolearningllc/edx-platform-cypress-2,jamiefolsom/edx-platform,alu042/edx-platform,olexiim/edx-platform,chudaol/edx-platform,ovnicraft/edx-platform,y12uc231/edx-platform,halvertoluke/edx-platform,unicri/edx-platform,J861449197/edx-platform,angelapper/edx-platform,chudaol/edx-platform,analyseuc3m/ANALYSE-v1,louyihua/edx-platform,devs1991/test_edx_docmode,vikas1885/test1,peterm-itr/edx-platform,IndonesiaX/edx-platform,Edraak/edx-platform,morenopc/edx-platform,ampax/edx-platform,wwj718/ANALYSE,caesar2164/edx-platform,chudaol/edx-platform,chrisndodge/
edx-platform,cpennington/edx-platform,eestay/edx-platform,waheedahmed/edx-platform,zerobatu/edx-platform,Stanford-Online/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,benpatterson/edx-platform,zubair-arbi/edx-platform,cognitiveclass/edx-platform,stvstnfrd/edx-platform,Edraak/edx-platform,WatanabeYasumasa/edx-platform,appliedx/edx-platform,olexiim/edx-platform,nikolas/edx-platform,marcore/edx-platform,franosincic/edx-platform,cpennington/edx-platform,CourseTalk/edx-platform,appliedx/edx-platform,gsehub/edx-platform,tiagochiavericosta/edx-platform,ovnicraft/edx-platform,procangroup/edx-platform,chrisndodge/edx-platform,mbareta/edx-platform-ft,waheedahmed/edx-platform,jbzdak/edx-platform,hmcmooc/muddx-platform,pomegranited/edx-platform,kmoocdev/edx-platform,xuxiao19910803/edx-platform,pabloborrego93/edx-platform,torchingloom/edx-platform,JioEducation/edx-platform,jswope00/griffinx,MSOpenTech/edx-platform,B-MOOC/edx-platform,LICEF/edx-platform,hkawasaki/kawasaki-aio8-0,olexiim/edx-platform,IONISx/edx-platform,UOMx/edx-platform,y12uc231/edx-platform,eduNEXT/edx-platform,kxliugang/edx-platform,jonathan-beard/edx-platform,wwj718/ANALYSE,motion2015/edx-platform,fly19890211/edx-platform,UXE/local-edx,ubc/edx-platform,jzoldak/edx-platform,vikas1885/test1,nagyistoce/edx-platform,antoviaque/edx-platform,Unow/edx-platform,openfun/edx-platform,mcgachey/edx-platform,dsajkl/123,ampax/edx-platform,zofuthan/edx-platform,jbzdak/edx-platform,Ayub-Khan/edx-platform,jswope00/GAI,miptliot/edx-platform,louyihua/edx-platform,Shrhawk/edx-platform,torchingloom/edx-platform,cselis86/edx-platform,xuxiao19910803/edx,chauhanhardik/populo,jamesblunt/edx-platform,pku9104038/edx-platform,nanolearning/edx-platform,jazkarta/edx-platform-for-isc,jjmiranda/edx-platform,4eek/edx-platform,Softmotions/edx-platform,lduarte1991/edx-platform,nttks/jenkins-test,antoviaque/edx-platform,beacloudgenius/edx-platform,Endika/edx-platform,AkA84/edx-platform,LearnEra/LearnEraPlaftform,valtech-mooc/edx-pl
atform,appliedx/edx-platform,simbs/edx-platform,J861449197/edx-platform,don-github/edx-platform,ak2703/edx-platform,olexiim/edx-platform,pabloborrego93/edx-platform,peterm-itr/edx-platform,dcosentino/edx-platform,iivic/BoiseStateX,ESOedX/edx-platform,rismalrv/edx-platform,cyanna/edx-platform,martynovp/edx-platform,shubhdev/openedx,eduNEXT/edx-platform,utecuy/edx-platform,Livit/Livit.Learn.EdX,defance/edx-platform,eduNEXT/edunext-platform,caesar2164/edx-platform,bitifirefly/edx-platform,LICEF/edx-platform,jswope00/griffinx,eemirtekin/edx-platform,jswope00/GAI,shashank971/edx-platform,nttks/jenkins-test,nttks/edx-platform,simbs/edx-platform,mushtaqak/edx-platform,mbareta/edx-platform-ft,antoviaque/edx-platform,zerobatu/edx-platform,shubhdev/edx-platform,dkarakats/edx-platform,ak2703/edx-platform,Ayub-Khan/edx-platform,cselis86/edx-platform,sudheerchintala/LearnEraPlatForm,BehavioralInsightsTeam/edx-platform,EDUlib/edx-platform,analyseuc3m/ANALYSE-v1,Edraak/edx-platform,atsolakid/edx-platform,cselis86/edx-platform,vikas1885/test1,atsolakid/edx-platform,bigdatauniversity/edx-platform,miptliot/edx-platform,deepsrijit1105/edx-platform,rue89-tech/edx-platform,shurihell/testasia,ahmadiga/min_edx,marcore/edx-platform,devs1991/test_edx_docmode,Livit/Livit.Learn.EdX,4eek/edx-platform,appliedx/edx-platform,TeachAtTUM/edx-platform,zadgroup/edx-platform,Lektorium-LLC/edx-platform,bitifirefly/edx-platform,fly19890211/edx-platform,arifsetiawan/edx-platform,ampax/edx-platform-backup,arbrandes/edx-platform,wwj718/ANALYSE,iivic/BoiseStateX,DefyVentures/edx-platform,knehez/edx-platform,motion2015/edx-platform,devs1991/test_edx_docmode,RPI-OPENEDX/edx-platform,mushtaqak/edx-platform,mjirayu/sit_academy,jamiefolsom/edx-platform,naresh21/synergetics-edx-platform,kamalx/edx-platform,hmcmooc/muddx-platform,openfun/edx-platform,tiagochiavericosta/edx-platform,BehavioralInsightsTeam/edx-platform,kursitet/edx-platform,antoviaque/edx-platform,angelapper/edx-platform,B-MOOC/edx-platform,xuxiao19
910803/edx,carsongee/edx-platform,edx-solutions/edx-platform,SivilTaram/edx-platform,edx-solutions/edx-platform,CourseTalk/edx-platform,ZLLab-Mooc/edx-platform,pepeportela/edx-platform,jzoldak/edx-platform,cognitiveclass/edx-platform,yokose-ks/edx-platform,knehez/edx-platform,mahendra-r/edx-platform,Endika/edx-platform,EDUlib/edx-platform,franosincic/edx-platform,antonve/s4-project-mooc,ferabra/edx-platform,kamalx/edx-platform,etzhou/edx-platform,mtlchun/edx,kmoocdev/edx-platform,mushtaqak/edx-platform,chudaol/edx-platform,Unow/edx-platform,andyzsf/edx,jonathan-beard/edx-platform,amir-qayyum-khan/edx-platform,mcgachey/edx-platform,mahendra-r/edx-platform,EDUlib/edx-platform,inares/edx-platform,louyihua/edx-platform,dsajkl/123,AkA84/edx-platform,martynovp/edx-platform,jolyonb/edx-platform,AkA84/edx-platform,dsajkl/123,dsajkl/123,cyanna/edx-platform,kamalx/edx-platform,jswope00/griffinx,mbareta/edx-platform-ft,xuxiao19910803/edx,inares/edx-platform,jazkarta/edx-platform-for-isc,jonathan-beard/edx-platform,pomegranited/edx-platform,cyanna/edx-platform,hamzehd/edx-platform,zadgroup/edx-platform,romain-li/edx-platform,arifsetiawan/edx-platform,DNFcode/edx-platform,UXE/local-edx,teltek/edx-platform,mjirayu/sit_academy,ahmedaljazzar/edx-platform,wwj718/edx-platform,jswope00/griffinx,jelugbo/tundex,zadgroup/edx-platform,morenopc/edx-platform,ampax/edx-platform-backup,wwj718/edx-platform,chand3040/cloud_that,jazkarta/edx-platform,xingyepei/edx-platform,sudheerchintala/LearnEraPlatForm,jamiefolsom/edx-platform,arifsetiawan/edx-platform,IndonesiaX/edx-platform,amir-qayyum-khan/edx-platform,raccoongang/edx-platform,vasyarv/edx-platform,hamzehd/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,a-parhom/edx-platform,hkawasaki/kawasaki-aio8-1,Edraak/edraak-platform,cselis86/edx-platform,naresh21/synergetics-edx-platform,ovnicraft/edx-platform,prarthitm/edxplatform,bigdatauniversity/edx-platform,DNFcode/edx-platform,solashirai/edx-platform,fintech-circle/edx-platform,deepsr
ijit1105/edx-platform,miptliot/edx-platform,cecep-edu/edx-platform,zhenzhai/edx-platform,benpatterson/edx-platform,naresh21/synergetics-edx-platform,hkawasaki/kawasaki-aio8-0,xinjiguaike/edx-platform,bdero/edx-platform,kursitet/edx-platform,ahmedaljazzar/edx-platform,vikas1885/test1,chauhanhardik/populo_2,msegado/edx-platform,tiagochiavericosta/edx-platform,chand3040/cloud_that,RPI-OPENEDX/edx-platform,y12uc231/edx-platform,shabab12/edx-platform,andyzsf/edx,J861449197/edx-platform,mcgachey/edx-platform,shubhdev/edxOnBaadal,rismalrv/edx-platform,DefyVentures/edx-platform,jbassen/edx-platform,beni55/edx-platform,arbrandes/edx-platform,bitifirefly/edx-platform,a-parhom/edx-platform,devs1991/test_edx_docmode,jswope00/griffinx,waheedahmed/edx-platform,shubhdev/edx-platform,jamesblunt/edx-platform,yokose-ks/edx-platform,UOMx/edx-platform,auferack08/edx-platform,LearnEra/LearnEraPlaftform,LICEF/edx-platform,doismellburning/edx-platform,cyanna/edx-platform,jruiperezv/ANALYSE,jbassen/edx-platform,cecep-edu/edx-platform,cognitiveclass/edx-platform,LearnEra/LearnEraPlaftform,dcosentino/edx-platform,edx/edx-platform,dsajkl/reqiop,eduNEXT/edunext-platform,torchingloom/edx-platform,AkA84/edx-platform,SivilTaram/edx-platform,Softmotions/edx-platform,adoosii/edx-platform,nagyistoce/edx-platform,playm2mboy/edx-platform,4eek/edx-platform,morenopc/edx-platform,mjirayu/sit_academy,ampax/edx-platform,mcgachey/edx-platform,Edraak/circleci-edx-platform,franosincic/edx-platform,defance/edx-platform,alexthered/kienhoc-platform,valtech-mooc/edx-platform,yokose-ks/edx-platform,sudheerchintala/LearnEraPlatForm,MakeHer/edx-platform,rue89-tech/edx-platform,jamesblunt/edx-platform,marcore/edx-platform,alexthered/kienhoc-platform,Lektorium-LLC/edx-platform,polimediaupv/edx-platform,proversity-org/edx-platform,longmen21/edx-platform,kmoocdev/edx-platform,shashank971/edx-platform,jjmiranda/edx-platform,solashirai/edx-platform,MakeHer/edx-platform,tiagochiavericosta/edx-platform,chauhanhardik/populo_
2,zadgroup/edx-platform,10clouds/edx-platform,nanolearningllc/edx-platform-cypress-2,nanolearningllc/edx-platform-cypress-2,eduNEXT/edunext-platform,solashirai/edx-platform,IONISx/edx-platform,kmoocdev2/edx-platform,ahmadio/edx-platform,adoosii/edx-platform,mahendra-r/edx-platform,edx/edx-platform,vismartltd/edx-platform,jonathan-beard/edx-platform,eestay/edx-platform,IONISx/edx-platform,ferabra/edx-platform,hamzehd/edx-platform,ahmedaljazzar/edx-platform,don-github/edx-platform,vismartltd/edx-platform,shubhdev/edxOnBaadal,OmarIthawi/edx-platform,polimediaupv/edx-platform,jamesblunt/edx-platform,motion2015/edx-platform,knehez/edx-platform,ovnicraft/edx-platform,knehez/edx-platform,leansoft/edx-platform,iivic/BoiseStateX,appsembler/edx-platform,polimediaupv/edx-platform,nanolearningllc/edx-platform-cypress-2,dsajkl/123,bdero/edx-platform,beni55/edx-platform,chand3040/cloud_that,playm2mboy/edx-platform,jelugbo/tundex,jazkarta/edx-platform-for-isc,DNFcode/edx-platform,TeachAtTUM/edx-platform,xuxiao19910803/edx,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform,Livit/Livit.Learn.EdX,shubhdev/edx-platform,waheedahmed/edx-platform,nikolas/edx-platform,ESOedX/edx-platform,shabab12/edx-platform,bdero/edx-platform,B-MOOC/edx-platform,DefyVentures/edx-platform,Edraak/circleci-edx-platform,jazztpt/edx-platform,RPI-OPENEDX/edx-platform,itsjeyd/edx-platform,Kalyzee/edx-platform,wwj718/ANALYSE,edry/edx-platform,Ayub-Khan/edx-platform,Semi-global/edx-platform,eestay/edx-platform,nanolearningllc/edx-platform-cypress-2,Lektorium-LLC/edx-platform,JCBarahona/edX,pomegranited/edx-platform,halvertoluke/edx-platform,pku9104038/edx-platform,sameetb-cuelogic/edx-platform-test,halvertoluke/edx-platform,pepeportela/edx-platform,chand3040/cloud_that,rhndg/openedx,a-parhom/edx-platform,ZLLab-Mooc/edx-platform,rhndg/openedx,sameetb-cuelogic/edx-platform-test,Kalyzee/edx-platform,jazztpt/edx-platform,jazkarta/edx-platform,alu042/edx-platform,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,proversit
y-org/edx-platform,polimediaupv/edx-platform,LICEF/edx-platform,Ayub-Khan/edx-platform,deepsrijit1105/edx-platform,knehez/edx-platform,lduarte1991/edx-platform,nagyistoce/edx-platform,rhndg/openedx,eemirtekin/edx-platform,beni55/edx-platform,miptliot/edx-platform,zofuthan/edx-platform,ampax/edx-platform,beni55/edx-platform,jazkarta/edx-platform,xingyepei/edx-platform,unicri/edx-platform,ak2703/edx-platform,abdoosh00/edraak,Ayub-Khan/edx-platform,tanmaykm/edx-platform,zerobatu/edx-platform,cognitiveclass/edx-platform,MakeHer/edx-platform,IndonesiaX/edx-platform,doismellburning/edx-platform,zhenzhai/edx-platform,mitocw/edx-platform,LearnEra/LearnEraPlaftform,itsjeyd/edx-platform,jbassen/edx-platform,dkarakats/edx-platform,adoosii/edx-platform,msegado/edx-platform,doganov/edx-platform,WatanabeYasumasa/edx-platform,playm2mboy/edx-platform,jolyonb/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-0,Endika/edx-platform,beacloudgenius/edx-platform,zofuthan/edx-platform,rhndg/openedx,rue89-tech/edx-platform,xingyepei/edx-platform,Kalyzee/edx-platform,rue89-tech/edx-platform,UOMx/edx-platform,utecuy/edx-platform,playm2mboy/edx-platform,JioEducation/edx-platform,eemirtekin/edx-platform,shabab12/edx-platform,chauhanhardik/populo,abdoosh00/edraak,nttks/jenkins-test,doganov/edx-platform,msegado/edx-platform,antonve/s4-project-mooc,xinjiguaike/edx-platform,ahmadiga/min_edx | """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return self.browser.title == "edX"
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
Fix find courses page title in bok choy test suite | """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return "edX" in self.browser.title
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
| <commit_before>"""
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return self.browser.title == "edX"
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
<commit_msg>Fix find courses page title in bok choy test suite<commit_after> | """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return "edX" in self.browser.title
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
| """
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return self.browser.title == "edX"
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
Fix find courses page title in bok choy test suite"""
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return "edX" in self.browser.title
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
| <commit_before>"""
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return self.browser.title == "edX"
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
<commit_msg>Fix find courses page title in bok choy test suite<commit_after>"""
Find courses page (main page of the LMS).
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import BrokenPromise
from . import BASE_URL
class FindCoursesPage(PageObject):
"""
Find courses page (main page of the LMS).
"""
url = BASE_URL
def is_browser_on_page(self):
return "edX" in self.browser.title
@property
def course_id_list(self):
"""
Retrieve the list of available course IDs
on the page.
"""
return self.css_map('article.course', lambda el: el['id'])
|
ad4b972667e9111c403c1d3726b2cde87fcbc88e | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
)
| #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
use_2to3=True,
)
| Use 2to3 for Python 3 | Use 2to3 for Python 3
| Python | mit | tehmaze/natural | #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
)
Use 2to3 for Python 3 | #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
use_2to3=True,
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
)
<commit_msg>Use 2to3 for Python 3<commit_after> | #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
use_2to3=True,
)
| #!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
)
Use 2to3 for Python 3#!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
use_2to3=True,
)
| <commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
)
<commit_msg>Use 2to3 for Python 3<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name='natural',
version='0.1.4',
description='Convert data to their natural (human-readable) format',
long_description='''
Example Usage
=============
Basic usage::
>>> from natural.file import accessed
>>> print accessed(__file__)
just now
We speak your language (with `your support`_)::
>>> import locale
>>> locale.setlocale(locale.LC_MESSAGES, 'nl_NL')
>>> print accessed(__file__)
zojuist
Bugs/Features
=============
You can issue a ticket in GitHub: https://github.com/tehmaze/natural/issues
Documentation
=============
The project documentation can be found at http://natural.rtfd.org/
.. _your support: http://natural.readthedocs.org/en/latest/locales.html
''',
author='Wijnand Modderman-Lenstra',
author_email='maze@pyth0n.org',
license='MIT',
keywords='natural data date file number size',
url='https://github.com/tehmaze/natural',
packages=['natural'],
package_data={'natural': ['locale/*/LC_MESSAGES/*.mo']},
use_2to3=True,
)
|
ffcc9d8c87ddc7fd386dd51c1fca1ac8b62d5828 | setup.py | setup.py | #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.6,<1.8',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.6,<1.8',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
| #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.8,<1.9',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.8,<1.9',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
| Update Django requirements for 1.8 | Update Django requirements for 1.8
| Python | bsd-3-clause | MasonM/django-elect,MasonM/django-elect,MasonM/django-elect | #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.6,<1.8',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.6,<1.8',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
Update Django requirements for 1.8 | #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.8,<1.9',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.8,<1.9',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
| <commit_before>#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.6,<1.8',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.6,<1.8',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
<commit_msg>Update Django requirements for 1.8<commit_after> | #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.8,<1.9',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.8,<1.9',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
| #!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.6,<1.8',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.6,<1.8',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
Update Django requirements for 1.8#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.8,<1.9',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.8,<1.9',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
| <commit_before>#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.6,<1.8',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.6,<1.8',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
<commit_msg>Update Django requirements for 1.8<commit_after>#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name='django-elect',
version='0.1',
description='A simple voting app for Django',
license='BSD',
author='Mason Malone',
author_email='mason.malone@gmail.com',
url='http://bitbucket.org/MasonM/django-elect/',
packages=find_packages(exclude=['example_project', 'example_project.*']),
include_package_data=True,
tests_require=[
'django>=1.8,<1.9',
'freezegun',
'unittest2',
],
test_suite='runtests.runtests',
install_requires=[
'django>=1.8,<1.9',
'setuptools',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Framework :: Django',
'Programming Language :: Python',
'Programming Language :: JavaScript',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.